gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.vulkan;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * Structure describing the shader demote to helper invocations features that can be supported by an implementation.
 *
 * <h5>Description</h5>
 *
 * <p>If the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} structure is included in the {@code pNext} chain of the {@link VkPhysicalDeviceFeatures2} structure passed to {@link VK11#vkGetPhysicalDeviceFeatures2 GetPhysicalDeviceFeatures2}, it is filled in to indicate whether each corresponding feature is supported. {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} <b>can</b> also be used in the {@code pNext} chain of {@link VkDeviceCreateInfo} to selectively enable these features.</p>
 *
 * <h5>Valid Usage (Implicit)</h5>
 *
 * <ul>
 * <li>{@code sType} <b>must</b> be {@link VK13#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES}</li>
 * </ul>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>
 * struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures {
 *     VkStructureType {@link #sType};
 *     void * {@link #pNext};
 *     VkBool32 {@link #shaderDemoteToHelperInvocation};
 * }</code></pre>
 */
public class VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures extends Struct implements NativeResource {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        STYPE,
        PNEXT,
        SHADERDEMOTETOHELPERINVOCATION;

    static {
        // Native member layout: VkStructureType (4 bytes), void* (pointer-sized), VkBool32 (4 bytes).
        // The Layout helper resolves platform-specific sizes/alignments and member offsets.
        Layout layout = __struct(
            __member(4),
            __member(POINTER_SIZE),
            __member(4)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        STYPE = layout.offsetof(0);
        PNEXT = layout.offsetof(1);
        SHADERDEMOTETOHELPERINVOCATION = layout.offsetof(2);
    }

    /**
     * Creates a {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures(ByteBuffer container) {
        super(memAddress(container), __checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** the type of this structure. */
    @NativeType("VkStructureType")
    public int sType() { return nsType(address()); }

    /** {@code NULL} or a pointer to a structure extending this structure. */
    @NativeType("void *")
    public long pNext() { return npNext(address()); }

    /** indicates whether the implementation supports the SPIR-V {@code DemoteToHelperInvocationEXT} capability. */
    @NativeType("VkBool32")
    public boolean shaderDemoteToHelperInvocation() { return nshaderDemoteToHelperInvocation(address()) != 0; }

    /** Sets the specified value to the {@link #sType} field. */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }

    /** Sets the {@link VK13#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES} value to the {@link #sType} field. */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures sType$Default() { return sType(VK13.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES); }

    /** Sets the specified value to the {@link #pNext} field. */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures pNext(@NativeType("void *") long value) { npNext(address(), value); return this; }

    /** Sets the specified value to the {@link #shaderDemoteToHelperInvocation} field. */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures shaderDemoteToHelperInvocation(@NativeType("VkBool32") boolean value) { nshaderDemoteToHelperInvocation(address(), value ? 1 : 0); return this; }

    /** Initializes this struct with the specified values. */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures set(
        int sType,
        long pNext,
        boolean shaderDemoteToHelperInvocation
    ) {
        sType(sType);
        pNext(pNext);
        shaderDemoteToHelperInvocation(shaderDemoteToHelperInvocation);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures set(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures malloc() {
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures calloc() {
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance allocated with {@link BufferUtils}. */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, memAddress(container), container);
    }

    /** Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance for the specified memory address. */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures create(long address) {
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures createSafe(long address) {
        return address == NULL ? null : wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, address);
    }

    /**
     * Returns a new {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    /**
     * Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures malloc(MemoryStack stack) {
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures calloc(MemoryStack stack) {
        return wrap(VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #sType}. */
    public static int nsType(long struct) { return UNSAFE.getInt(null, struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.STYPE); }
    /** Unsafe version of {@link #pNext}. */
    public static long npNext(long struct) { return memGetAddress(struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.PNEXT); }
    /** Unsafe version of {@link #shaderDemoteToHelperInvocation}. */
    public static int nshaderDemoteToHelperInvocation(long struct) { return UNSAFE.getInt(null, struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.SHADERDEMOTETOHELPERINVOCATION); }

    /** Unsafe version of {@link #sType(int) sType}. */
    public static void nsType(long struct, int value) { UNSAFE.putInt(null, struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.STYPE, value); }
    /** Unsafe version of {@link #pNext(long) pNext}. */
    public static void npNext(long struct, long value) { memPutAddress(struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.PNEXT, value); }
    /** Unsafe version of {@link #shaderDemoteToHelperInvocation(boolean) shaderDemoteToHelperInvocation}. */
    public static void nshaderDemoteToHelperInvocation(long struct, int value) { UNSAFE.putInt(null, struct + VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.SHADERDEMOTETOHELPERINVOCATION, value); }

    // -----------------------------------

    /** An array of {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures} structs. */
    public static class Buffer extends StructBuffer<VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures, Buffer> implements NativeResource {

        // Dummy element used only to stamp out per-element views; created at address -1 so it is never dereferenced directly.
        private static final VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ELEMENT_FACTORY = VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.create(-1L);

        /**
         * Creates a new {@code VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** @return the value of the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#sType} field. */
        @NativeType("VkStructureType")
        public int sType() { return VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.nsType(address()); }

        /** @return the value of the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#pNext} field. */
        @NativeType("void *")
        public long pNext() { return VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.npNext(address()); }

        /** @return the value of the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#shaderDemoteToHelperInvocation} field. */
        @NativeType("VkBool32")
        public boolean shaderDemoteToHelperInvocation() { return VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.nshaderDemoteToHelperInvocation(address()) != 0; }

        /** Sets the specified value to the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#sType} field. */
        public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer sType(@NativeType("VkStructureType") int value) { VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.nsType(address(), value); return this; }

        /** Sets the {@link VK13#VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES} value to the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#sType} field. */
        public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer sType$Default() { return sType(VK13.VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES); }

        /** Sets the specified value to the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#pNext} field. */
        public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer pNext(@NativeType("void *") long value) { VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.npNext(address(), value); return this; }

        /** Sets the specified value to the {@link VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures#shaderDemoteToHelperInvocation} field. */
        public VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.Buffer shaderDemoteToHelperInvocation(@NativeType("VkBool32") boolean value) { VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.nshaderDemoteToHelperInvocation(address(), value ? 1 : 0); return this; }

    }

}
/* * Copyright (c) 2010-2015 William Bittle http://www.dyn4j.org/ * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, this list of conditions * and the following disclaimer in the documentation and/or other materials provided with the * distribution. * * Neither the name of dyn4j nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.dyn4j.samples; import java.awt.Graphics2D; import org.dyn4j.dynamics.Body; import org.dyn4j.dynamics.BodyFixture; import org.dyn4j.dynamics.joint.RevoluteJoint; import org.dyn4j.geometry.Convex; import org.dyn4j.geometry.Geometry; import org.dyn4j.geometry.MassType; import org.dyn4j.geometry.Vector2; /** * A somewhat complex scene with a ragdoll. 
* @author William Bittle * @since 3.2.1 * @version 3.2.0 */ public class Ragdoll extends SimulationFrame { /** The serial version id */ private static final long serialVersionUID = -2350301592218819726L; /** * Default constructor. */ public Ragdoll() { super("Ragdoll", 64.0); } /** * Creates game objects and adds them to the world. */ protected void initializeWorld() { // Ground Body ground = new SimulationBody(); {// Fixture1 Convex c = Geometry.createRectangle(100.0, 1.0); BodyFixture bf = new BodyFixture(c); ground.addFixture(bf); } ground.translate(new Vector2(0.6875, -8.75)); ground.setMass(MassType.INFINITE); world.addBody(ground); // the ragdoll // Head Body head = new SimulationBody(); {// Fixture2 Convex c = Geometry.createCircle(0.25); BodyFixture bf = new BodyFixture(c); head.addFixture(bf); } head.setMass(MassType.NORMAL); world.addBody(head); // Torso Body torso = new SimulationBody(); {// Fixture4 Convex c = Geometry.createRectangle(0.5, 1.0); BodyFixture bf = new BodyFixture(c); torso.addFixture(bf); } {// Fixture16 Convex c = Geometry.createRectangle(1.0, 0.25); c.translate(new Vector2(0.00390625, 0.375)); BodyFixture bf = new BodyFixture(c); torso.addFixture(bf); } torso.translate(new Vector2(0.0234375, -0.8125)); torso.setMass(MassType.NORMAL); world.addBody(torso); // Right Humerus Body rightHumerus = new SimulationBody(); {// Fixture5 Convex c = Geometry.createRectangle(0.25, 0.5); BodyFixture bf = new BodyFixture(c); rightHumerus.addFixture(bf); } rightHumerus.translate(new Vector2(0.4375, -0.609375)); rightHumerus.setMass(MassType.NORMAL); world.addBody(rightHumerus); // Right Ulna Body rightUlna = new SimulationBody(); {// Fixture6 Convex c = Geometry.createRectangle(0.25, 0.4); BodyFixture bf = new BodyFixture(c); rightUlna.addFixture(bf); } rightUlna.translate(new Vector2(0.44140625, -0.98828125)); rightUlna.setMass(MassType.NORMAL); world.addBody(rightUlna); // Neck Body neck = new SimulationBody(); {// Fixture7 Convex c = 
Geometry.createRectangle(0.15, 0.2); BodyFixture bf = new BodyFixture(c); neck.addFixture(bf); } neck.translate(new Vector2(0.015625, -0.2734375)); neck.setMass(MassType.NORMAL); world.addBody(neck); // Left Humerus Body leftHumerus = new SimulationBody(); {// Fixture9 Convex c = Geometry.createRectangle(0.25, 0.5); BodyFixture bf = new BodyFixture(c); leftHumerus.addFixture(bf); } leftHumerus.translate(new Vector2(-0.3828125, -0.609375)); leftHumerus.setMass(MassType.NORMAL); world.addBody(leftHumerus); // Left Ulna Body leftUlna = new SimulationBody(); {// Fixture11 Convex c = Geometry.createRectangle(0.25, 0.4); BodyFixture bf = new BodyFixture(c); leftUlna.addFixture(bf); } leftUlna.translate(new Vector2(-0.3828125, -0.9765625)); leftUlna.setMass(MassType.NORMAL); world.addBody(leftUlna); // Right Femur Body rightFemur = new SimulationBody(); {// Fixture12 Convex c = Geometry.createRectangle(0.25, 0.75); BodyFixture bf = new BodyFixture(c); rightFemur.addFixture(bf); } rightFemur.translate(new Vector2(0.1796875, -1.5703125)); rightFemur.setMass(MassType.NORMAL); world.addBody(rightFemur); // Left Femur Body leftFemur = new SimulationBody(); {// Fixture13 Convex c = Geometry.createRectangle(0.25, 0.75); BodyFixture bf = new BodyFixture(c); leftFemur.addFixture(bf); } leftFemur.translate(new Vector2(-0.1328125, -1.5703125)); leftFemur.setMass(MassType.NORMAL); world.addBody(leftFemur); // Right Tibia Body rightTibia = new SimulationBody(); {// Fixture14 Convex c = Geometry.createRectangle(0.25, 0.5); BodyFixture bf = new BodyFixture(c); rightTibia.addFixture(bf); } rightTibia.translate(new Vector2(0.18359375, -2.11328125)); rightTibia.setMass(MassType.NORMAL); world.addBody(rightTibia); // Left Tibia Body leftTibia = new SimulationBody(); {// Fixture15 Convex c = Geometry.createRectangle(0.25, 0.5); BodyFixture bf = new BodyFixture(c); leftTibia.addFixture(bf); } leftTibia.translate(new Vector2(-0.1328125, -2.1171875)); leftTibia.setMass(MassType.NORMAL); 
world.addBody(leftTibia); // Head to Neck RevoluteJoint headToNeck = new RevoluteJoint(head, neck, new Vector2(0.01, -0.2)); headToNeck.setLimitEnabled(false); headToNeck.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); headToNeck.setReferenceAngle(Math.toRadians(0.0)); headToNeck.setMotorEnabled(false); headToNeck.setMotorSpeed(Math.toRadians(0.0)); headToNeck.setMaximumMotorTorque(0.0); headToNeck.setCollisionAllowed(false); world.addJoint(headToNeck); // Neck to Torso RevoluteJoint neckToTorso = new RevoluteJoint(neck, torso, new Vector2(0.01, -0.35)); neckToTorso.setLimitEnabled(false); neckToTorso.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); neckToTorso.setReferenceAngle(Math.toRadians(0.0)); neckToTorso.setMotorEnabled(false); neckToTorso.setMotorSpeed(Math.toRadians(0.0)); neckToTorso.setMaximumMotorTorque(0.0); neckToTorso.setCollisionAllowed(false); world.addJoint(neckToTorso); // Torso to Left Humerus RevoluteJoint torsoToLeftHumerus = new RevoluteJoint(torso, leftHumerus, new Vector2(-0.4, -0.4)); torsoToLeftHumerus.setLimitEnabled(false); torsoToLeftHumerus.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); torsoToLeftHumerus.setReferenceAngle(Math.toRadians(0.0)); torsoToLeftHumerus.setMotorEnabled(false); torsoToLeftHumerus.setMotorSpeed(Math.toRadians(0.0)); torsoToLeftHumerus.setMaximumMotorTorque(0.0); torsoToLeftHumerus.setCollisionAllowed(false); world.addJoint(torsoToLeftHumerus); // Torso to Right Humerus RevoluteJoint torsoToRightHumerus = new RevoluteJoint(torso, rightHumerus, new Vector2(0.4, -0.4)); torsoToRightHumerus.setLimitEnabled(false); torsoToRightHumerus.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); torsoToRightHumerus.setReferenceAngle(Math.toRadians(0.0)); torsoToRightHumerus.setMotorEnabled(false); torsoToRightHumerus.setMotorSpeed(Math.toRadians(0.0)); torsoToRightHumerus.setMaximumMotorTorque(0.0); torsoToRightHumerus.setCollisionAllowed(false); world.addJoint(torsoToRightHumerus); // Right Humerus to Right 
Ulna RevoluteJoint rightHumerusToRightUlna = new RevoluteJoint(rightHumerus, rightUlna, new Vector2(0.43, -0.82)); rightHumerusToRightUlna.setLimitEnabled(false); rightHumerusToRightUlna.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); rightHumerusToRightUlna.setReferenceAngle(Math.toRadians(0.0)); rightHumerusToRightUlna.setMotorEnabled(false); rightHumerusToRightUlna.setMotorSpeed(Math.toRadians(0.0)); rightHumerusToRightUlna.setMaximumMotorTorque(0.0); rightHumerusToRightUlna.setCollisionAllowed(false); world.addJoint(rightHumerusToRightUlna); // Left Humerus to Left Ulna RevoluteJoint leftHumerusToLeftUlna = new RevoluteJoint(leftHumerus, leftUlna, new Vector2(-0.4, -0.81)); leftHumerusToLeftUlna.setLimitEnabled(false); leftHumerusToLeftUlna.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); leftHumerusToLeftUlna.setReferenceAngle(Math.toRadians(0.0)); leftHumerusToLeftUlna.setMotorEnabled(false); leftHumerusToLeftUlna.setMotorSpeed(Math.toRadians(0.0)); leftHumerusToLeftUlna.setMaximumMotorTorque(0.0); leftHumerusToLeftUlna.setCollisionAllowed(false); world.addJoint(leftHumerusToLeftUlna); // Torso to Right Femur RevoluteJoint torsoToRightFemur = new RevoluteJoint(torso, rightFemur, new Vector2(0.16, -1.25)); torsoToRightFemur.setLimitEnabled(false); torsoToRightFemur.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); torsoToRightFemur.setReferenceAngle(Math.toRadians(0.0)); torsoToRightFemur.setMotorEnabled(false); torsoToRightFemur.setMotorSpeed(Math.toRadians(0.0)); torsoToRightFemur.setMaximumMotorTorque(0.0); torsoToRightFemur.setCollisionAllowed(false); world.addJoint(torsoToRightFemur); // Torso to Left Femur RevoluteJoint torsoToLeftFemur = new RevoluteJoint(torso, leftFemur, new Vector2(-0.13, -1.25)); torsoToLeftFemur.setLimitEnabled(false); torsoToLeftFemur.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); torsoToLeftFemur.setReferenceAngle(Math.toRadians(0.0)); torsoToLeftFemur.setMotorEnabled(false); 
torsoToLeftFemur.setMotorSpeed(Math.toRadians(0.0)); torsoToLeftFemur.setMaximumMotorTorque(0.0); torsoToLeftFemur.setCollisionAllowed(false); world.addJoint(torsoToLeftFemur); // Right Femur to Right Tibia RevoluteJoint rightFemurToRightTibia = new RevoluteJoint(rightFemur, rightTibia, new Vector2(0.17, -1.9)); rightFemurToRightTibia.setLimitEnabled(false); rightFemurToRightTibia.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); rightFemurToRightTibia.setReferenceAngle(Math.toRadians(0.0)); rightFemurToRightTibia.setMotorEnabled(false); rightFemurToRightTibia.setMotorSpeed(Math.toRadians(0.0)); rightFemurToRightTibia.setMaximumMotorTorque(0.0); rightFemurToRightTibia.setCollisionAllowed(false); world.addJoint(rightFemurToRightTibia); // Left Femur to Left Tibia RevoluteJoint leftFemurToLeftTibia = new RevoluteJoint(leftFemur, leftTibia, new Vector2(-0.14, -1.9)); leftFemurToLeftTibia.setLimitEnabled(false); leftFemurToLeftTibia.setLimits(Math.toRadians(0.0), Math.toRadians(0.0)); leftFemurToLeftTibia.setReferenceAngle(Math.toRadians(0.0)); leftFemurToLeftTibia.setMotorEnabled(false); leftFemurToLeftTibia.setMotorSpeed(Math.toRadians(0.0)); leftFemurToLeftTibia.setMaximumMotorTorque(0.0); leftFemurToLeftTibia.setCollisionAllowed(false); world.addJoint(leftFemurToLeftTibia); } /* (non-Javadoc) * @see org.dyn4j.samples.SimulationFrame#render(java.awt.Graphics2D, double) */ @Override protected void render(Graphics2D g, double elapsedTime) { // move the view a bit g.translate(0, 300); super.render(g, elapsedTime); } /** * Entry point for the example application. * @param args command line arguments */ public static void main(String[] args) { Ragdoll simulation = new Ragdoll(); simulation.run(); } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.siyeh.ig.controlflow;

import com.intellij.codeInspection.*;
import com.intellij.codeInspection.dataFlow.SpecialField;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.value.DfaRelationValue;
import com.intellij.codeInspection.dataFlow.value.DfaValueFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.callMatcher.CallMatcher;
import com.siyeh.ig.psiutils.BoolUtils;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.EquivalenceChecker;
import com.siyeh.ig.psiutils.JavaPsiMathUtil;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;
import java.util.function.BinaryOperator;

import static com.intellij.psi.CommonClassNames.*;
import static com.siyeh.ig.callMatcher.CallMatcher.anyOf;
import static com.siyeh.ig.callMatcher.CallMatcher.instanceCall;

/**
 * Inspection that reports boolean chains of range checks over the same expression
 * that collapse to a single equality/inequality, e.g.
 * {@code x >= 0 && x <= 0} -> {@code x == 0}, and offers a quick-fix that performs
 * the replacement.
 */
public class ExcessiveRangeCheckInspection extends AbstractBaseJavaLocalInspectionTool {
  // Matches parameterless isEmpty() on java.util.Collection or java.util.Map receivers.
  private static final CallMatcher COLLECTION_IS_EMPTY =
    anyOf(instanceCall(JAVA_UTIL_COLLECTION, "isEmpty").parameterCount(0),
          instanceCall(JAVA_UTIL_MAP, "isEmpty").parameterCount(0));
  // Matches parameterless String.isEmpty().
  private static final CallMatcher STRING_IS_EMPTY = instanceCall(JAVA_LANG_STRING, "isEmpty").parameterCount(0);

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    return new JavaElementVisitor() {
      @Override
      public void visitPolyadicExpression(PsiPolyadicExpression expression) {
        IElementType type = expression.getOperationTokenType();
        boolean andChain = type.equals(JavaTokenType.ANDAND);
        // Only chains of && or || are candidates.
        if (!andChain && !type.equals(JavaTokenType.OROR)) return;
        // Group consecutive operands that constrain the same expression
        // (nulls from extractConstraint never group — sameExpression rejects null).
        for (List<RangeConstraint> run : StreamEx.of(expression.getOperands()).map(ExcessiveRangeCheckInspection::extractConstraint)
          .groupRuns(RangeConstraint::sameExpression)) {
          if (run.size() <= 1) continue;
          // && intersects constraints; || unions them.
          BinaryOperator<LongRangeSet> reductionOp = andChain ? LongRangeSet::intersect : LongRangeSet::unite;
          LongRangeSet set = run.stream().map(c -> c.myConstraint).reduce(reductionOp).orElse(LongRangeSet.empty());
          if (set.isEmpty()) continue;
          RangeConstraint constraint = run.get(0);
          if (!andChain) {
            // For || chains, complement against the type's full range so the
            // report can be phrased as "!=".
            set = constraint.getFullRange().subtract(set);
          }
          // Report only when the combined range pins down a single value.
          if (!set.isEmpty() && set.min() == set.max()) {
            String text = constraint.myExpression.getText() + constraint.getExpressionSuffix();
            String replacement = text + ' ' + (andChain ? "==" : "!=") + ' ' + set.min();
            String message = InspectionGadgetsBundle.message("inspection.excessive.range.check.message", replacement);
            // Highlight only the span covering the run of redundant operands.
            holder.registerProblem(expression,
                                   new TextRange(constraint.myRange.getStartOffset(), run.get(run.size() - 1).myRange.getEndOffset()),
                                   message,
                                   new ExcessiveRangeCheckFix(replacement));
          }
        }
      }
    };
  }

  /**
   * Extracts a {@link RangeConstraint} from a single chain operand, or returns
   * null when the operand does not express an integral range check.
   * Handles negation ({@code !e}), {@code isEmpty()} calls on strings/collections
   * (mapped to length/size == 0), and binary comparisons with an int/long literal
   * on either side.
   */
  private static RangeConstraint extractConstraint(PsiExpression expression) {
    // Text range is captured before unwrapping parentheses so highlighting
    // covers the original operand as written.
    TextRange textRange = expression.getTextRangeInParent();
    expression = PsiUtil.skipParenthesizedExprDown(expression);
    if (expression == null) return null;
    PsiExpression negated = BoolUtils.getNegated(expression);
    if (negated != null) {
      RangeConstraint constraint = extractConstraint(negated);
      return constraint == null ? null : constraint.negate(textRange);
    }
    if (expression instanceof PsiMethodCallExpression) {
      PsiExpression qualifier = ((PsiMethodCallExpression)expression).getMethodExpression().getQualifierExpression();
      if (qualifier != null) {
        if (STRING_IS_EMPTY.matches(expression)) {
          // s.isEmpty()  ==>  s.length() in {0}
          return new RangeConstraint(textRange, qualifier, SpecialField.STRING_LENGTH, LongRangeSet.point(0));
        }
        else if (COLLECTION_IS_EMPTY.matches(expression)) {
          // c.isEmpty()  ==>  c.size() in {0}
          return new RangeConstraint(textRange, qualifier, SpecialField.COLLECTION_SIZE, LongRangeSet.point(0));
        }
      }
    }
    if (expression instanceof PsiBinaryExpression) {
      PsiBinaryExpression binOp = (PsiBinaryExpression)expression;
      DfaRelationValue.RelationType rel = DfaRelationValue.RelationType.fromElementType(binOp.getOperationTokenType());
      if (rel == null) return null;
      PsiExpression left = PsiUtil.skipParenthesizedExprDown(binOp.getLOperand());
      PsiExpression right = PsiUtil.skipParenthesizedExprDown(binOp.getROperand());
      if (left == null || right == null) return null;
      // Only integral comparisons are modeled by LongRangeSet.
      if (!TypeConversionUtil.isIntegralNumberType(left.getType()) || !TypeConversionUtil.isIntegralNumberType(right.getType())) {
        return null;
      }
      Number leftNum = JavaPsiMathUtil.getNumberFromLiteral(left);
      Number rightNum = JavaPsiMathUtil.getNumberFromLiteral(right);
      LongRangeSet set;
      PsiExpression compared;
      if (leftNum instanceof Integer || leftNum instanceof Long) {
        // Literal on the left: flip the relation so it is "compared <rel> literal".
        set = LongRangeSet.point(leftNum.longValue()).fromRelation(rel.getFlipped());
        compared = right;
      }
      else if (rightNum instanceof Integer || rightNum instanceof Long) {
        set = LongRangeSet.point(rightNum.longValue()).fromRelation(rel);
        compared = left;
      }
      else {
        return null;
      }
      return RangeConstraint.create(textRange, compared, set);
    }
    return null;
  }

  /**
   * A range restriction on a single expression (optionally via a special field
   * such as array length or collection size), plus the text range of the chain
   * operand it came from.
   */
  private static class RangeConstraint {
    private final @NotNull TextRange myRange;
    private final @NotNull PsiExpression myExpression;
    // Non-null when the constraint applies to a derived value
    // (ARRAY_LENGTH / STRING_LENGTH / COLLECTION_SIZE) rather than the expression itself.
    private final @Nullable SpecialField myField;
    private final @NotNull LongRangeSet myConstraint;

    private RangeConstraint(@NotNull TextRange range, @NotNull PsiExpression expression, @Nullable SpecialField field, @NotNull LongRangeSet constraint) {
      myRange = range;
      myExpression = expression;
      myField = field;
      // Clamp the constraint to the representable range up front.
      myConstraint = constraint.intersect(getFullRange());
    }

    /** Returns the complement constraint, re-anchored at the given text range. */
    RangeConstraint negate(TextRange newTextRange) {
      return new RangeConstraint(newTextRange, myExpression, myField, getFullRange().subtract(myConstraint));
    }

    /** True when both constraints restrict the same expression via the same special field. */
    static boolean sameExpression(RangeConstraint left, RangeConstraint right) {
      return left != null && right != null &&
             left.myField == right.myField &&
             EquivalenceChecker.getCanonicalPsiEquivalence().expressionsAreEquivalent(left.myExpression, right.myExpression);
    }

    /** The full representable range for the constrained value (field default range or expression type range). */
    @NotNull
    LongRangeSet getFullRange() {
      LongRangeSet result;
      if (myField != null) {
        DfaValueFactory factory = new DfaValueFactory(null, false);
        result = LongRangeSet.fromDfaValue(myField.getDefaultValue(factory, false));
      }
      else {
        result = LongRangeSet.fromType(myExpression.getType());
      }
      return result == null ? LongRangeSet.all() : result;
    }

    /** The accessor text to append to the expression when presenting the replacement. */
    String getExpressionSuffix() {
      if (myField == null) return "";
      switch (myField) {
        case ARRAY_LENGTH: return ".length";
        case STRING_LENGTH: return ".length()";
        case COLLECTION_SIZE: return ".size()";
        default: return "";
      }
    }

    /**
     * Builds a constraint, rewriting {@code qualifier.field} / {@code qualifier.accessor()}
     * into a special-field constraint on the qualifier when the referenced member is a
     * known special field.
     */
    @NotNull
    static RangeConstraint create(TextRange textRange, PsiExpression expr, LongRangeSet set) {
      SpecialField field = null;
      PsiReferenceExpression ref = expr instanceof PsiReferenceExpression ? (PsiReferenceExpression)expr :
                                   expr instanceof PsiMethodCallExpression ? ((PsiMethodCallExpression)expr).getMethodExpression() : null;
      if (ref != null) {
        PsiExpression qualifier = ref.getQualifierExpression();
        if (qualifier != null) {
          field = SpecialField.findSpecialField(ref.resolve());
          if (field != null) {
            expr = qualifier;
          }
        }
      }
      return new RangeConstraint(textRange, expr, field, set);
    }
  }

  /** Quick-fix that replaces the reported run of operands with the single comparison. */
  private static class ExcessiveRangeCheckFix implements LocalQuickFix {
    private final String myReplacement;

    ExcessiveRangeCheckFix(String replacement) {
      myReplacement = replacement;
    }

    @Nls(capitalization = Nls.Capitalization.Sentence)
    @NotNull
    @Override
    public String getName() {
      return CommonQuickFixBundle.message("fix.replace.with.x", myReplacement);
    }

    @Nls(capitalization = Nls.Capitalization.Sentence)
    @NotNull
    @Override
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("inspection.excessive.range.check.fix.family.name");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiPolyadicExpression expression = ObjectUtils.tryCast(descriptor.getStartElement(), PsiPolyadicExpression.class);
      if (expression == null) return;
      // The descriptor's range identifies which operands belong to the reported run.
      TextRange range = descriptor.getTextRangeInElement();
      PsiExpression[] allOperands = expression.getOperands();
      List<PsiExpression> operands = ContainerUtil.filter(allOperands, op -> range.contains(op.getTextRangeInParent()));
      if (operands.size() < 2) return;
      PsiExpression firstOperand = operands.get(0);
      RangeConstraint constraint = extractConstraint(firstOperand);
      if (constraint == null) return;
      CommentTracker ct = new CommentTracker();
      ct.markUnchanged(constraint.myExpression);
      if (operands.size() == allOperands.length) {
        // The whole chain collapses: replace the entire polyadic expression.
        ct.replaceAndRestoreComments(expression, myReplacement);
      }
      else {
        // Only part of the chain collapses: delete the run (keeping its comments)
        // and replace the first operand with the simplified comparison.
        PsiElement firstToDelete = firstOperand.getNextSibling();
        PsiElement lastToDelete = ContainerUtil.getLastItem(operands);
        for(PsiElement e = firstToDelete; e != lastToDelete; e = e.getNextSibling()) {
          ct.grabComments(e);
        }
        expression.deleteChildRange(firstToDelete, lastToDelete);
        ct.replaceAndRestoreComments(firstOperand, myReplacement);
      }
    }
  }
}
package com.xiledsystems.AlternateJavaBridgelib;

import android.text.method.PasswordTransformationMethod;
import android.view.View;
import android.view.View.OnFocusChangeListener;
import android.widget.EditText;
import com.google.devtools.simple.common.ComponentConstants;
import com.google.devtools.simple.runtime.components.Component;
import com.google.devtools.simple.runtime.components.android.AndroidViewComponent;
import com.google.devtools.simple.runtime.components.android.ComponentContainer;
import com.google.devtools.simple.runtime.components.android.Form;
import com.google.devtools.simple.runtime.components.android.util.TextViewUtil;
import com.google.devtools.simple.runtime.events.EventDispatcher;

/**
 * A password text box component: an {@link EditText} whose content is masked
 * with a {@link PasswordTransformationMethod}. Dispatches GotFocus/LostFocus
 * events and can optionally auto-resize itself relative to screen size on
 * form initialization (see {@link #setMultipliers(double, double)}).
 */
public class PasswordTextBox2 extends AndroidViewComponent implements OnFocusChangeListener, OnInitializeListener {

  // The underlying Android view backing this component.
  private final EditText view;

  // Backing for text alignment
  private int textAlignment;

  // Backing for background color
  private int backgroundColor;

  // Backing for font typeface
  private int fontTypeface;

  // Backing for font bold
  private boolean bold;

  // Backing for font italic
  private boolean italic;

  // Backing for hint text
  private String hint;

  // Backing for text color
  private int textColor;

  // Screen-size fractions used when autoResize is enabled.
  private double widthMultiplier;
  private double heightMultiplier;
  // Whether to resize this component on form initialization.
  private boolean autoResize= false;
  private final Form form;

  /**
   * Creates a new TextBox component.
   *
   * @param container container, component will be placed in
   */
  public PasswordTextBox2(ComponentContainer container) {
    super(container);
    form = container.$form();
    // Only Form2 supports the onInitialize callback used for auto-resizing.
    if (form instanceof Form2) {
      ((Form2) form).registerForOnInitialize(this);
    }
    view = new EditText(container.$context());

    // Listen to focus changes
    view.setOnFocusChangeListener(this);

    // Add a transformation method to hide password text.
    view.setTransformationMethod(new PasswordTransformationMethod());

    // Adds the component to its designated container
    container.$add(this);
    container.setChildWidth(this, ComponentConstants.TEXTBOX_PREFERRED_WIDTH);

    // Initialization of default property values.
    TextAlignment(Component.ALIGNMENT_NORMAL);
    BackgroundColor(Component.COLOR_NONE);
    Enabled(true);
    fontTypeface = Component.TYPEFACE_DEFAULT;
    TextViewUtil.setFontTypeface(view, fontTypeface, bold, italic);
    FontSize(Component.FONT_DEFAULT_SIZE);
    Hint("");
    Text("");
    TextColor(Component.COLOR_BLACK);
  }

  @Override
  public View getView() {
    return view;
  }

  /**
   * Default GotFocus event handler.
   */
  public void GotFocus() {
    EventDispatcher.dispatchEvent(this, "GotFocus");
  }

  /**
   * Default LostFocus event handler.
   */
  public void LostFocus() {
    EventDispatcher.dispatchEvent(this, "LostFocus");
  }

  /**
   * Returns the alignment of the password textbox's text: center, normal
   * (e.g., left-justified if text is written left to right), or
   * opposite (e.g., right-justified if text is written left to right).
   *
   * @return one of {@link Component#ALIGNMENT_NORMAL},
   *         {@link Component#ALIGNMENT_CENTER} or
   *         {@link Component#ALIGNMENT_OPPOSITE}
   */
  public int TextAlignment() {
    return textAlignment;
  }

  /**
   * Specifies the alignment of the password textbox's text: center, normal
   * (e.g., left-justified if text is written left to right), or
   * opposite (e.g., right-justified if text is written left to right).
   *
   * @param alignment one of {@link Component#ALIGNMENT_NORMAL},
   *                  {@link Component#ALIGNMENT_CENTER} or
   *                  {@link Component#ALIGNMENT_OPPOSITE}
   */
  public void TextAlignment(int alignment) {
    this.textAlignment = alignment;
    TextViewUtil.setAlignment(view, alignment, false);
  }

  /**
   * Returns the password textbox's background color as an alpha-red-green-blue
   * integer.
   *
   * @return background RGB color with alpha
   */
  public int BackgroundColor() {
    return backgroundColor;
  }

  /**
   * Specifies the password textbox's background color as an alpha-red-green-blue
   * integer.
   *
   * @param argb background RGB color with alpha
   */
  public void BackgroundColor(int argb) {
    backgroundColor = argb;
    // COLOR_DEFAULT falls back to COLOR_NONE rather than a theme color.
    if (argb != Component.COLOR_DEFAULT) {
      TextViewUtil.setBackgroundColor(view, argb);
    } else {
      TextViewUtil.setBackgroundColor(view, Component.COLOR_NONE);
    }
  }

  /**
   * Returns true if the password textbox is active and useable.
   *
   * @return {@code true} indicates enabled, {@code false} disabled
   */
  public boolean Enabled() {
    return TextViewUtil.isEnabled(view);
  }

  /**
   * Specifies whether the password textbox should be active and useable.
   *
   * @param enabled {@code true} for enabled, {@code false} disabled
   */
  public void Enabled(boolean enabled) {
    TextViewUtil.setEnabled(view, enabled);
  }

  /**
   * Returns true if the password textbox's text should be bold.
   * If bold has been requested, this property will return true, even if the
   * font does not support bold.
   *
   * @return {@code true} indicates bold, {@code false} normal
   */
  public boolean FontBold() {
    return bold;
  }

  /**
   * Specifies whether the password textbox's text should be bold.
   * Some fonts do not support bold.
   *
   * @param bold {@code true} indicates bold, {@code false} normal
   */
  public void FontBold(boolean bold) {
    this.bold = bold;
    TextViewUtil.setFontTypeface(view, fontTypeface, bold, italic);
  }

  /**
   * Returns true if the password textbox's text should be italic.
   * If italic has been requested, this property will return true, even if the
   * font does not support italic.
   *
   * @return {@code true} indicates italic, {@code false} normal
   */
  public boolean FontItalic() {
    return italic;
  }

  /**
   * Specifies whether the password textbox's text should be italic.
   * Some fonts do not support italic.
   *
   * @param italic {@code true} indicates italic, {@code false} normal
   */
  public void FontItalic(boolean italic) {
    this.italic = italic;
    TextViewUtil.setFontTypeface(view, fontTypeface, bold, italic);
  }

  /**
   * Returns the password textbox's text's font size, measured in pixels.
   *
   * @return font size in pixel
   */
  public float FontSize() {
    return TextViewUtil.getFontSize(view);
  }

  /**
   * Specifies the password textbox's text's font size, measured in pixels.
   *
   * @param size font size in pixel
   */
  public void FontSize(float size) {
    TextViewUtil.setFontSize(view, size);
  }

  /**
   * Returns the password textbox's text's font face as default, serif, sans
   * serif, or monospace.
   *
   * @return one of {@link Component#TYPEFACE_DEFAULT},
   *         {@link Component#TYPEFACE_SERIF},
   *         {@link Component#TYPEFACE_SANSSERIF} or
   *         {@link Component#TYPEFACE_MONOSPACE}
   */
  public int FontTypeface() {
    return fontTypeface;
  }

  /**
   * Specifies the password textbox's text's font face as default, serif, sans
   * serif, or monospace.
   *
   * @param typeface one of {@link Component#TYPEFACE_DEFAULT},
   *                 {@link Component#TYPEFACE_SERIF},
   *                 {@link Component#TYPEFACE_SANSSERIF} or
   *                 {@link Component#TYPEFACE_MONOSPACE}
   */
  public void FontTypeface(int typeface) {
    fontTypeface = typeface;
    TextViewUtil.setFontTypeface(view, fontTypeface, bold, italic);
  }

  /**
   * Hint property getter method.
   *
   * @return hint text
   */
  public String Hint() {
    return hint;
  }

  /**
   * Hint property setter method.
   *
   * @param hint hint text
   */
  public void Hint(String hint) {
    this.hint = hint;
    view.setHint(hint);
    view.invalidate();
  }

  /**
   * Returns the password textbox contents.
   *
   * @return text box contents
   */
  public String Text() {
    return TextViewUtil.getText(view);
  }

  /**
   * Specifies the password textbox contents.
   *
   * @param text new text in text box
   */
  public void Text(String text) {
    TextViewUtil.setText(view, text);
  }

  /**
   * Returns the password textbox's text color as an alpha-red-green-blue
   * integer.
   *
   * @return text RGB color with alpha
   */
  public int TextColor() {
    return textColor;
  }

  /**
   * Specifies the password textbox's text color as an alpha-red-green-blue
   * integer.
   *
   * @param argb text RGB color with alpha
   */
  public void TextColor(int argb) {
    textColor = argb;
    // COLOR_DEFAULT falls back to black rather than a theme color.
    if (argb != Component.COLOR_DEFAULT) {
      TextViewUtil.setTextColor(view, argb);
    } else {
      TextViewUtil.setTextColor(view, Component.COLOR_BLACK);
    }
  }

  // OnFocusChangeListener implementation
  // NOTE(review): despite the parameter name, Android passes the view whose
  // focus state changed as the first argument, not the previously focused view.
  @Override
  public void onFocusChange(View previouslyFocused, boolean gainFocus) {
    if (gainFocus) {
      GotFocus();
    } else {
      LostFocus();
    }
  }

  // Called by Form2 once the form is initialized; applies the screen-relative
  // size if setMultipliers() was used. Safe only when form is a Form2, which
  // is guaranteed because registration only happens for Form2 (see constructor).
  @Override
  public void onInitialize() {
    if (autoResize) {
      this.Width((int) (((Form2) form).scrnWidth * widthMultiplier));
      this.Height((int) (((Form2) form).scrnHeight * heightMultiplier));
    }
  }

  /**
   * Enables auto-resizing: on form initialization the component's width/height
   * are set to the given fractions of the screen width/height.
   *
   * @param widthmultiplier fraction of the screen width (e.g. 0.5 = half)
   * @param heightmultiplier fraction of the screen height
   */
  public void setMultipliers(double widthmultiplier, double heightmultiplier) {
    autoResize=true;
    this.widthMultiplier = widthmultiplier;
    this.heightMultiplier = heightmultiplier;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.standard;

import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.syslog.keyproviders.SyslogPrefixedKeyProvider;
import org.apache.nifi.syslog.utils.NifiStructuredDataPolicy;
import org.apache.nifi.syslog.utils.NilHandlingPolicy;
import org.apache.nifi.syslog.parsers.StrictSyslog5424Parser;
import org.apache.nifi.syslog.events.Syslog5424Event;
import org.apache.nifi.syslog.attributes.SyslogAttributes;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@EventDriven
@SideEffectFree
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"logs", "syslog", "syslog5424", "attributes", "system", "event", "message"})
@CapabilityDescription("Attempts to parse the contents of a well formed Syslog message in accordance to RFC5424 " +
                       "format and adds attributes to the FlowFile for each of the parts of the Syslog message, including Structured Data." +
                       "Structured Data will be written to attributes as one attribute per item id + parameter "+
                       "see https://tools.ietf.org/html/rfc5424." +
                       "Note: ParseSyslog5424 follows the specification more closely than ParseSyslog. If your Syslog producer " +
                       "does not follow the spec closely, with regards to using '-' for missing header entries for example, those logs " +
                       "will fail with this parser, where they would not fail with ParseSyslog.")
@WritesAttributes({@WritesAttribute(attribute = "syslog.priority", description = "The priority of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.severity", description = "The severity of the Syslog message derived from the priority."),
                   @WritesAttribute(attribute = "syslog.facility", description = "The facility of the Syslog message derived from the priority."),
                   @WritesAttribute(attribute = "syslog.version", description = "The optional version from the Syslog message."),
                   @WritesAttribute(attribute = "syslog.timestamp", description = "The timestamp of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.hostname", description = "The hostname or IP address of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.appname", description = "The appname of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.procid", description = "The procid of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.messageid", description = "The messageid of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.structuredData", description = "Multiple entries per structuredData of the Syslog message."),
                   @WritesAttribute(attribute = "syslog.sender", description = "The hostname of the Syslog server that sent the message."),
                   @WritesAttribute(attribute = "syslog.body", description = "The body of the Syslog message, everything after the hostname.")})
@SeeAlso({ListenSyslog.class, ParseSyslog.class, PutSyslog.class})
public class ParseSyslog5424 extends AbstractProcessor {

    // Allowable values for the NIL Policy property: how '-' (NIL) header fields
    // are represented on the resulting FlowFile.
    public static final AllowableValue OMIT = new AllowableValue(NilHandlingPolicy.OMIT.name(),NilHandlingPolicy.OMIT.name(),
        "The missing field will not have an attribute added.");
    public static final AllowableValue NULL = new AllowableValue(NilHandlingPolicy.NULL.name(),NilHandlingPolicy.NULL.name(),
        "The missing field will have an empty attribute added.");
    public static final AllowableValue DASH = new AllowableValue(NilHandlingPolicy.DASH.name(),NilHandlingPolicy.DASH.name(),
        "The missing field will have an attribute added with the value of '-'.");

    public static final PropertyDescriptor CHARSET = new PropertyDescriptor.Builder()
        .name("Character Set")
        .description("Specifies which character set of the Syslog messages")
        .required(true)
        .defaultValue("UTF-8")
        .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
        .build();

    public static final PropertyDescriptor NIL_POLICY = new PropertyDescriptor.Builder()
        .name("nil_policy")
        .displayName("NIL Policy")
        .description("Defines how NIL values are handled for header fields.")
        .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
        .allowableValues(OMIT,NULL,DASH)
        .required(true)
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .defaultValue(NULL.getValue())
        .build();

    public static final PropertyDescriptor INCLUDE_BODY_IN_ATTRIBUTES = new PropertyDescriptor.Builder()
        .name("include_policy")
        .displayName("Include Message Body in Attributes")
        .description("If true, then the Syslog Message body will be included in the attributes.")
        .addValidator(StandardValidators.BOOLEAN_VALIDATOR)
        .allowableValues("true","false")
        .expressionLanguageSupported(ExpressionLanguageScope.NONE)
        .required(false)
        .defaultValue("true")
        .build();

    static final Relationship REL_FAILURE = new Relationship.Builder()
        .name("failure")
        .description("Any FlowFile that could not be parsed as a Syslog message will be transferred to this Relationship without any attributes being added")
        .build();

    static final Relationship REL_SUCCESS = new Relationship.Builder()
        .name("success")
        // FIX: the description previously read "will be to this Relationship" — dropped word restored.
        .description("Any FlowFile that is successfully parsed as a Syslog message will be transferred to this Relationship.")
        .build();

    // Rebuilt on each schedule from the current property values; volatile because
    // onTrigger may run on a different thread than onScheduled.
    private volatile StrictSyslog5424Parser parser;

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // FIX: presize for the 3 descriptors actually added (was 2).
        final List<PropertyDescriptor> properties = new ArrayList<>(3);
        properties.add(CHARSET);
        properties.add(NIL_POLICY);
        properties.add(INCLUDE_BODY_IN_ATTRIBUTES);
        return properties;
    }

    @Override
    public Set<Relationship> getRelationships() {
        final Set<Relationship> relationships = new HashSet<>();
        relationships.add(REL_FAILURE);
        relationships.add(REL_SUCCESS);
        return relationships;
    }

    /**
     * Builds the RFC 5424 parser from the configured charset and NIL policy.
     * Structured data is always flattened into individual attributes with
     * syslog-prefixed keys.
     */
    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        final String charsetName = context.getProperty(CHARSET).getValue();
        final String nilPolicyString = context.getProperty(NIL_POLICY).getValue();
        parser = new StrictSyslog5424Parser(Charset.forName(charsetName),
            NilHandlingPolicy.valueOf(nilPolicyString), NifiStructuredDataPolicy.FLATTEN, new SyslogPrefixedKeyProvider());
    }

    /**
     * Reads the full FlowFile content, parses it as a single RFC 5424 message,
     * and either routes to failure (unparseable) or adds the parsed fields as
     * attributes and routes to success.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }

        boolean includeBody = true;
        if (context.getProperty(INCLUDE_BODY_IN_ATTRIBUTES).isSet()) {
            includeBody = context.getProperty(INCLUDE_BODY_IN_ATTRIBUTES).asBoolean();
        }

        // The whole content is buffered: the parser operates on a complete message.
        final byte[] buffer = new byte[(int) flowFile.getSize()];
        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(final InputStream in) throws IOException {
                StreamUtils.fillBuffer(in, buffer);
            }
        });

        final Syslog5424Event syslogEvent;
        try {
            syslogEvent = parser.parseEvent(buffer, null);
        } catch (final ProcessException pe) {
            getLogger().error("Failed to parse {} as a Syslog 5424 message due to {}; routing to failure", new Object[] {flowFile, pe});
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        if (syslogEvent == null || !syslogEvent.isValid()) {
            getLogger().error("Failed to parse {} as a Syslog message: it does not conform to any of the RFC formats supported; routing to failure",
                              new Object[] {flowFile});
            session.transfer(flowFile, REL_FAILURE);
            return;
        }

        Map<String,String> attributeMap = convertMap(syslogEvent.getFieldMap());
        if (!includeBody) {
            attributeMap.remove(SyslogAttributes.SYSLOG_BODY.key());
        }
        flowFile = session.putAllAttributes(flowFile, attributeMap);
        session.transfer(flowFile, REL_SUCCESS);
    }

    /**
     * Narrows the parser's field map to String values.
     * NOTE(review): assumes every value the parser emits is a String — the blind
     * cast would throw ClassCastException otherwise; confirm against
     * StrictSyslog5424Parser before widening the value types.
     */
    private static Map<String,String> convertMap(Map<String, Object> map) {
        Map<String,String> returnMap = new HashMap<>();
        map.forEach((key,value) -> returnMap.put(key,(String)value));
        return returnMap;
    }
}
package edu.upenn.library.solrplugins;

import java.io.IOException;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.request.FacetPayload;

/**
 * Builds facet payloads from fields containing filing and
 * prefix strings joined by a delimiter, and a payload attribute
 * containing information about references.
 *
 * The NamedList structure for a facet will look like this:
 *
 * <lst name="subject_xfacet">
 *   <lst name="Hegelianism">
 *     <int name="count">3</int>
 *     <lst name="self">
 *       <long name="count">2</long>
 *       <str name="filing">Hegelianism</str>
 *     </lst>
 *     <lst name="refs">
 *       <lst name="see_also">
 *         <lst name="Georg Wilhelm Friedrich Hegel">
 *           <long name="count">1</long>
 *           <str name="prefix">Georg Wilhelm Friedrich </str>
 *           <str name="filing">Hegel</str>
 *         </lst>
 *       </lst>
 *     </lst>
 *   </lst>
 * </lst>
 *
 * @author jeffchiu
 */
public class JsonReferencePayloadHandler implements FacetPayload<NamedList<Object>> {

  // NUL delimiter joining filing/prefix parts in raw term keys.
  // NOTE(review): parsing is delegated to MultiPartString; this constant is not
  // referenced in the code visible here — confirm it is still needed.
  private static final String DELIM = "\u0000";
  // Keys of the per-facet NamedList structure documented in the class javadoc.
  private static final String KEY_SELF = "self";
  private static final String KEY_REFS = "refs";
  private static final String KEY_PREFIX = "prefix";
  private static final String KEY_FILING = "filing";
  private static final String KEY_COUNT = "count";

  /**
   * Overwrite entry in NamedList with new value
   * (update existing key, or add the key/value if key doesn't already exist).
   */
  private static void overwriteInNamedList(NamedList<Object> namedList, String key, Object value) {
    int indexOfKey = namedList.indexOf(key, 0);
    if(indexOfKey != -1) {
      namedList.setVal(indexOfKey, value);
    } else {
      namedList.add(key, value);
    }
  }

  /**
   * Copies a field value from one NamedList into another, overwriting the
   * destination entry if present. Does nothing when {@code key} is absent
   * from {@code from}.
   */
  private static void copyFieldInNamedList(NamedList<Object> from, NamedList<Object> to, String key) {
    int index = from.indexOf(key, 0);
    if(index != -1) {
      Object value = from.get(key);
      int index2 = to.indexOf(key, 0);
      if(index2 != -1) {
        to.setVal(index2, value);
      } else {
        to.add(key, value);
      }
    }
  }

  /**
   * For passed-in NamedList, get the NamedList value for a certain key,
   * creating and storing it if it doesn't exist.
   *
   * @param namedList container to look in (mutated when the key is absent)
   * @param key key whose NamedList value is wanted
   * @return the existing or newly created nested NamedList
   */
  private static NamedList<Object> getOrCreateNamedListValue(NamedList<Object> namedList, String key) {
    NamedList<Object> result = (NamedList<Object>) namedList.get(key);
    if(result == null) {
      result = new NamedList<>();
      namedList.add(key, result);
    }
    return result;
  }

  /**
   * Increment a Long value in a NamedList stored under "key", creating it with
   * value of 1 if it doesn't exist.
   */
  private static void incrementLongInNamedList(NamedList<Object> namedList, String key) {
    int index = namedList.indexOf(key, 0);
    if(index != -1) {
      long oldCount = ((Number) namedList.getVal(index)).longValue();
      namedList.setVal(index, oldCount + 1L);
    } else {
      namedList.add(key, 1L);
    }
  }

  /**
   * Adds the numeric value stored under {@code key} in {@code from} to the
   * value stored under the same key in {@code to} (treated as 0 when absent),
   * writing the sum back into {@code to}.
   *
   * @param from source list; must contain a Number under {@code key}
   * @param to destination list; mutated in place
   * @param key key of the count being merged
   */
  private static void mergeCount(NamedList<Object> from, NamedList<Object> to, String key) {
    // merge count
    long existingCount = 0;
    int indexOfCount = to.indexOf(key, 0);
    if(indexOfCount != -1) {
      existingCount = ((Number) to.get(key)).longValue();
    }
    long newCount = existingCount + ((Number) from.get(key)).longValue();
    overwriteInNamedList(to, key, newCount);
  }

  /** Builds the entry for one term and appends it to {@code res} under the term's display form. */
  @Override
  public boolean addEntry(String termKey, long count, Term t, List<Entry<LeafReader, Bits>> leaves, NamedList<NamedList<Object>> res) throws IOException {
    MultiPartString term = MultiPartString.parseNormalizedFilingAndPrefix(termKey);
    NamedList<Object> entry = buildEntryValue(term, count, t, leaves);
    res.add(term.getDisplay(), entry);
    return true;
  }

  /** Builds the entry for one term and returns it keyed by the raw (normalized) term key. */
  @Override
  public Entry<String, NamedList<Object>> addEntry(String termKey, long count, Term t, List<Entry<LeafReader, Bits>> leaves) throws IOException {
    MultiPartString term = MultiPartString.parseNormalizedFilingAndPrefix(termKey);
    return new SimpleImmutableEntry<>(termKey, buildEntryValue(term, count, t, leaves));
  }

  /**
   * Builds the per-term NamedList described in the class javadoc by walking the
   * term's postings across all index leaves and decoding each position payload.
   * Payloads of the form "referenceType + separator + target" are tallied under
   * "refs"; positions with no payload count toward the term itself ("self").
   */
  private NamedList<Object> buildEntryValue(MultiPartString term, long count, Term t, List<Entry<LeafReader, Bits>> leaves) throws IOException {
    NamedList<Object> entry = new NamedList<>();

    // document count for this term
    entry.add(KEY_COUNT, count);

    NamedList<Object> self = new NamedList<>();
    entry.add(KEY_SELF, self);
    self.add(KEY_COUNT, 0L);
    overwriteInNamedList(self, KEY_FILING, term.getFiling());
    if(term.getPrefix() != null) {
      overwriteInNamedList(self, KEY_PREFIX, term.getPrefix());
    }

    NamedList<Object> refs = new NamedList<>();

    // Reused per-document set so the same payload is only counted once per doc.
    Set<BytesRef> trackDuplicates = new HashSet<>();

    for (Entry<LeafReader, Bits> e : leaves) {
      PostingsEnum postings = e.getKey().postings(t, PostingsEnum.PAYLOADS);
      if (postings == null) {
        continue;
      }
      Bits liveDocs = e.getValue();
      while (postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
        // Skip documents that have been deleted in this leaf.
        if (liveDocs != null && !liveDocs.get(postings.docID())) {
          continue;
        }
        trackDuplicates.clear();
        for (int j = 0; j < postings.freq(); j++) {
          postings.nextPosition();
          BytesRef payload = postings.getPayload();
          // Duplicate payload within the same document: already counted.
          if (!trackDuplicates.add(payload)) {
            continue;
          }
          if (payload != null) {
            String payloadStr = payload.utf8ToString();
            int pos = payloadStr.indexOf(JsonReferencePayloadTokenizer.PAYLOAD_ATTR_SEPARATOR);
            if (pos != -1) {
              // Payload encodes "referenceType<sep>target"; tally under refs.
              String referenceType = payloadStr.substring(0, pos);
              String target = payloadStr.substring(pos + 1);
              MultiPartString multiPartString = MultiPartString.parseFilingAndPrefix(target);
              String displayName = multiPartString.getDisplay();

              NamedList<Object> displayNameStructs = getOrCreateNamedListValue(refs, referenceType);
              NamedList<Object> nameStruct = getOrCreateNamedListValue(displayNameStructs, displayName);
              incrementLongInNamedList(nameStruct, KEY_COUNT);
              overwriteInNamedList(nameStruct, KEY_FILING, multiPartString.getFiling());
              if (multiPartString.getPrefix() != null) {
                overwriteInNamedList(nameStruct, KEY_PREFIX, multiPartString.getPrefix());
              }
            }
            // NOTE(review): a non-null payload without the separator is silently
            // ignored — confirm that is intended.
          } else {
            // no payload means term is for self, so increment count
            incrementLongInNamedList(self, KEY_COUNT);
          }
          // Couldn't get this to work: postings.attributes() doesn't return anything: why?
          /*
          ReferenceAttribute refAtt = postings.attributes().getAttribute(ReferenceAttribute.class);
          if(refAtt != null) {
            System.out.println("found refAttr, " + refAtt.getReferenceType() + "," + refAtt.getTarget());
          }
          */
        }
      }
    }

    // Only attach "refs" when at least one reference was found.
    if(refs.size() > 0) {
      entry.add(KEY_REFS, refs);
    }

    return entry;
  }

  /**
   * Merges the {@code add} payload into {@code preExisting} in place: sums the
   * top-level and "self" counts, copies filing/prefix fields, and deep-merges
   * the nested "refs" structure (creating missing levels as needed).
   *
   * @return the mutated {@code preExisting} list
   * @throws IllegalStateException if {@code addCount} disagrees with the count
   *         stored inside {@code add}
   */
  @Override
  public NamedList<Object> mergePayload(NamedList<Object> preExisting, NamedList<Object> add, long preExistingCount, long addCount) {
    if (addCount != ((Number)add.get(KEY_COUNT)).longValue()) {
      throw new IllegalStateException("fieldType-internal and -external counts do not match");
    }

    int countIndex = preExisting.indexOf(KEY_COUNT, 0);
    long preCount = ((Number)preExisting.getVal(countIndex)).longValue();
    preExisting.setVal(countIndex, preCount + addCount);

    if(add.get(KEY_SELF) != null) {
      NamedList<Object> addSelf = (NamedList<Object>) add.get(KEY_SELF);
      NamedList<Object> preExistingSelf = getOrCreateNamedListValue(preExisting, KEY_SELF);
      mergeCount(addSelf, preExistingSelf, KEY_COUNT);
      copyFieldInNamedList(addSelf, preExistingSelf, KEY_FILING);
      copyFieldInNamedList(addSelf, preExistingSelf, KEY_PREFIX);
    }

    if(add.get(KEY_REFS) != null) {
      NamedList<Object> addRefs = (NamedList<Object>) add.get(KEY_REFS);
      Iterator<Map.Entry<String, Object>> refTypesIter = addRefs.iterator();
      while (refTypesIter.hasNext()) {
        Map.Entry<String, Object> entry = refTypesIter.next();
        String addReferenceType = entry.getKey();
        NamedList<Object> addNameStructs = (NamedList<Object>) entry.getValue();

        // if "refs" doesn't exist in preExisting yet, create it
        NamedList<Object> preExistingRefs = getOrCreateNamedListValue(preExisting, KEY_REFS);

        // if referenceType doesn't exist in preExisting yet, create it
        NamedList<Object> preExistingNameStructs = getOrCreateNamedListValue(preExistingRefs, addReferenceType);

        // loop through names and merge them into preExisting
        Iterator<Map.Entry<String, Object>> addNameStructsIter = addNameStructs.iterator();
        while (addNameStructsIter.hasNext()) {
          Map.Entry<String, Object> nameStructEntry = addNameStructsIter.next();
          String name = nameStructEntry.getKey();
          NamedList<Object> addNameStruct = (NamedList<Object>) nameStructEntry.getValue();

          // if name doesn't exist in preExisting yet, create it
          NamedList<Object> preExistingNameStruct = getOrCreateNamedListValue(preExistingNameStructs, name);

          mergeCount(addNameStruct, preExistingNameStruct, KEY_COUNT);
          copyFieldInNamedList(addNameStruct, preExistingNameStruct, KEY_FILING);
          copyFieldInNamedList(addNameStruct, preExistingNameStruct, KEY_PREFIX);
        }
      }
    }

    return preExisting;
  }

  /** Reads the top-level document count out of a payload built by this handler. */
  @Override
  public long extractCount(NamedList<Object> val) {
    return ((Number) val.get(KEY_COUNT)).longValue();
  }

  /**
   * No separate external representation is produced; returning null here
   * signals that the internal NamedList is used as-is.
   */
  @Override
  public Object updateValueExternalRepresentation(NamedList<Object> internal) {
    return null;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import javax.cache.Cache;
import javax.cache.configuration.Factory;
import javax.cache.integration.CacheWriterException;
import javax.cache.processor.MutableEntry;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheEntryProcessor;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.cache.store.CacheStore;
import org.apache.ignite.cache.store.CacheStoreAdapter;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.lang.IgniteBiInClosure;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static org.apache.ignite.IgniteSystemProperties.IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK;

/**
 * Tests for cache client with and without store.
 */
public class CacheClientStoreSelfTest extends GridCommonAbstractTest {
    /** Shared in-memory IP finder so all grids in a test discover each other. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Name of the cache under test. */
    private static final String CACHE_NAME = "test-cache";

    /** Whether a near cache is configured on client nodes (set per test before grids start). */
    private volatile boolean nearEnabled;

    /** Store factory used by the next grid to start (set per test before grids start). */
    private volatile Factory<CacheStore> factory;

    /** Cache mode used by the next grid to start (set per test before grids start). */
    private volatile CacheMode cacheMode;

    /** Flag raised by {@link TestStore#loadCache} when loading happened on a client node. */
    private static volatile boolean loadedFromClient;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Nodes whose instance name starts with "client" run in client mode.
        boolean client = igniteInstanceName != null && igniteInstanceName.startsWith("client");

        cfg.setClientMode(client);

        if (client)
            cfg.setDataStorageConfiguration(new DataStorageConfiguration());

        CacheConfiguration cc = new CacheConfiguration(DEFAULT_CACHE_NAME);

        cc.setName(CACHE_NAME);
        cc.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL);
        cc.setCacheMode(cacheMode);
        cc.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        cc.setBackups(1);
        cc.setCacheStoreFactory(factory);

        // Only Factory3 produces a real store, so read-through is enabled just for it.
        if (factory instanceof Factory3)
            cc.setReadThrough(true);

        if (client && nearEnabled)
            cc.setNearConfiguration(new NearCacheConfiguration());

        cfg.setCacheConfiguration(cc);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(disco);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        // Reset cross-test state written by TestStore.
        loadedFromClient = false;
    }

    /**
     * Exercises the full cache API on a client whose store factory matches the servers'.
     *
     * @throws Exception If failed.
     */
    public void testCorrectStore() throws Exception {
        nearEnabled = false;
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory1();

        startGrids(2);

        Ignite ignite = startGrid("client-1");

        IgniteCache<Object, Object> cache = ignite.cache(CACHE_NAME);

        cache.get(0);
        cache.getAll(F.asSet(0, 1));
        cache.getAndPut(0, 0);
        cache.getAndPutIfAbsent(0, 0);
        cache.getAndRemove(0);
        cache.getAndReplace(0, 0);
        cache.put(0, 0);
        cache.putAll(F.asMap(0, 0, 1, 1));
        cache.putIfAbsent(0, 0);
        cache.remove(0);
        cache.remove(0, 0);
        cache.removeAll(F.asSet(0, 1));
        cache.removeAll();
        cache.invoke(0, new EP());
        cache.invokeAll(F.asSet(0, 1), new EP());
    }

    /**
     * Starts a client whose store factory differs from the servers'.
     * NOTE(review): no explicit assertion here — presumably the configuration
     * consistency check is expected to reject the mismatched factory on start;
     * confirm the intended failure mode.
     *
     * @throws Exception If failed.
     */
    public void testInvalidStore() throws Exception {
        nearEnabled = false;
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory1();

        startGrids(2);

        factory = new Factory2();

        startGrid("client-1");
    }

    /**
     * Verifies that mismatched store factories are tolerated when the
     * configuration consistency check is disabled via system property.
     *
     * @throws Exception If failed.
     */
    public void testDisabledConsistencyCheck() throws Exception {
        nearEnabled = false;
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory1();

        startGrids(2);

        factory = new Factory2();

        System.setProperty(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK, "true");

        startGrid("client-1");

        factory = new Factory1();

        System.clearProperty(IGNITE_SKIP_CONFIGURATION_CONSISTENCY_CHECK);

        startGrid("client-2");
    }

    /**
     * Client with no store, near cache disabled: full cache API must still work.
     *
     * @throws Exception If failed.
     */
    public void testNoStoreNearDisabled() throws Exception {
        nearEnabled = false;
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory1();

        startGrids(2);

        doTestNoStore();
    }

    /**
     * Client with no store, near cache enabled: full cache API must still work.
     *
     * @throws Exception If failed.
     */
    public void testNoStoreNearEnabled() throws Exception {
        nearEnabled = true;
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory1();

        startGrids(2);

        doTestNoStore();
    }

    /**
     * Starts a store-less client and exercises the full cache API against it.
     *
     * @throws Exception If failed.
     */
    private void doTestNoStore() throws Exception {
        factory = null;

        Ignite ignite = startGrid("client-1");

        IgniteCache<Object, Object> cache = ignite.cache(CACHE_NAME);

        cache.get(0);
        cache.getAll(F.asSet(0, 1));
        cache.getAndPut(0, 0);
        cache.getAndPutIfAbsent(0, 0);
        cache.getAndRemove(0);
        cache.getAndReplace(0, 0);
        cache.put(0, 0);
        cache.putAll(F.asMap(0, 0, 1, 1));
        cache.putIfAbsent(0, 0);
        cache.remove(0);
        cache.remove(0, 0);
        cache.removeAll(F.asSet(0, 1));
        cache.removeAll();
        cache.invoke(0, new EP());
        cache.invokeAll(F.asSet(0, 1), new EP());
    }

    /**
     * Load cache created on client as LOCAL and see if it only loaded on client
     *
     * @throws Exception If failed.
     */
    public void testLocalLoadClient() throws Exception {
        cacheMode = CacheMode.LOCAL;
        factory = new Factory3();

        startGrids(2);

        Ignite client = startGrid("client-1");

        IgniteCache<Object, Object> cache = client.cache(CACHE_NAME);

        cache.loadCache(null);

        // TestStore loads exactly 10 entries; all must land on the client only.
        assertEquals(10, cache.localSize(CachePeekMode.ALL));

        assertEquals(0, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
        assertEquals(0, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));

        assert loadedFromClient;
    }

    /**
     * Load cache from server that created on client as LOCAL and see if it only loaded on server
     *
     * @throws Exception If failed.
     */
    public void testLocalLoadServer() throws Exception {
        cacheMode = CacheMode.LOCAL;
        factory = new Factory3();

        startGrids(2);

        Ignite client = startGrid("client-1");

        IgniteCache cache = grid(0).cache(CACHE_NAME);

        cache.loadCache(null);

        // Load initiated on server 0: entries stay local to it.
        assertEquals(10, cache.localSize(CachePeekMode.ALL));

        assertEquals(0, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
        assertEquals(0, client.cache(CACHE_NAME).localSize(CachePeekMode.ALL));

        assert !loadedFromClient : "Loaded data from client!";
    }

    /**
     * Load cache created on client as REPLICATED and see if it only loaded on servers
     */
    public void testReplicatedLoadFromClient() throws Exception {
        cacheMode = CacheMode.REPLICATED;
        factory = new Factory3();

        startGrids(2);

        Ignite client = startGrid("client-1");

        IgniteCache cache = client.cache(CACHE_NAME);

        cache.loadCache(null);

        // Client initiated the load, but data must reside only on the servers.
        assertEquals(0, cache.localSize(CachePeekMode.ALL));

        assertEquals(10, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
        assertEquals(10, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));

        assert !loadedFromClient : "Loaded data from client!";
    }

    /**
     * Load cache created on client as PARTITIONED (1 backup, so each server holds
     * all 10 entries) and see if it only loaded on servers
     */
    public void testPartitionedLoadFromClient() throws Exception {
        cacheMode = CacheMode.PARTITIONED;
        factory = new Factory3();

        startGrids(2);

        Ignite client = startGrid("client-1");

        IgniteCache cache = client.cache(CACHE_NAME);

        cache.loadCache(null);

        assertEquals(0, cache.localSize(CachePeekMode.ALL));

        assertEquals(10, grid(0).cache(CACHE_NAME).localSize(CachePeekMode.ALL));
        assertEquals(10, grid(1).cache(CACHE_NAME).localSize(CachePeekMode.ALL));

        assert !loadedFromClient : "Loaded data from client!";
    }

    /** Factory producing no store (null). */
    private static class Factory1 implements Factory<CacheStore> {
        /** {@inheritDoc} */
        @Override public CacheStore create() {
            return null;
        }
    }

    /** Second no-store factory; distinct class so configurations compare as different. */
    private static class Factory2 implements Factory<CacheStore> {
        /** {@inheritDoc} */
        @Override public CacheStore create() {
            return null;
        }
    }

    /** Factory producing the real {@link TestStore}. */
    private static class Factory3 implements Factory<CacheStore> {
        /** {@inheritDoc} */
        @Override public CacheStore create() {
            return new TestStore();
        }
    }

    /** No-op entry processor used to exercise invoke/invokeAll. */
    private static class EP implements CacheEntryProcessor {
        /** {@inheritDoc} */
        @Override public Object process(MutableEntry entry, Object... arguments) {
            return null;
        }
    }

    /**
     * Test store that loads 10 items and records whether loading ran on a client node.
     */
    public static class TestStore extends CacheStoreAdapter<Object, Object> {
        /** Injected local Ignite instance; used to detect client vs. server node. */
        @IgniteInstanceResource
        private Ignite ignite;

        /** {@inheritDoc} */
        @Override public Integer load(Object key) {
            return null;
        }

        /** {@inheritDoc} */
        @Override public void write(Cache.Entry<?, ?> entry) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void delete(Object key) throws CacheWriterException {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void loadCache(IgniteBiInClosure<Object, Object> clo, Object... args) {
            // Record that a client node performed the load so tests can assert on it.
            if (ignite.cluster().localNode().isClient())
                loadedFromClient = true;

            for (int i = 0; i < 10; i++)
                clo.apply(i, i);
        }
    }
}
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.facebook.buck.cxx;

import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.StepExecutionContext;
import com.facebook.buck.core.cell.nameresolver.CellNameResolver;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.RelPath;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.Flavor;
import com.facebook.buck.core.model.InternalFlavor;
import com.facebook.buck.core.parser.buildtargetparser.BuildTargetMatcher;
import com.facebook.buck.core.parser.buildtargetparser.BuildTargetMatcherParser;
import com.facebook.buck.core.rulekey.AddToRuleKey;
import com.facebook.buck.core.rules.SourcePathRuleFinder;
import com.facebook.buck.core.sourcepath.SourcePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.rules.modern.BuildCellRelativePathFactory;
import com.facebook.buck.rules.modern.Buildable;
import com.facebook.buck.rules.modern.ModernBuildRule;
import com.facebook.buck.rules.modern.OutputPath;
import com.facebook.buck.rules.modern.OutputPathResolver;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.facebook.buck.util.json.ObjectMappers;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Build rule for acquiring focused debug targets belonging to this build rule's dependent binary
 * build rule.
 *
 * <p>Under focused debugging, we have a focused targets json file which contains all focused
 * targets. For each binary to be linked, we filter the list of focused targets to grab the targets
 * that belong to each binary. This allows us to update only the affected binaries when the focused
 * targets json file's modified.
 *
 * <p>This rule outputs the intersection of focused targets and the link dependencies of this rule's
 * build target.
 */
public class CxxFocusedDebugTargets extends ModernBuildRule<CxxFocusedDebugTargets.Impl> {
  /** Flavor identifying this rule variant. */
  public static final Flavor FOCUSED_DEBUG_TARGETS = InternalFlavor.of("focused-debug-targets");

  /** Name of the JSON file this rule writes. */
  private static final String OUTPUT_FILENAME = "focused_targets.json";

  /** Serializable description of the build: inputs that feed the rule key plus the build steps. */
  static class Impl implements Buildable {
    @AddToRuleKey private final OutputPath output;
    @AddToRuleKey private final Optional<SourcePath> focusedTargetsList;

    // The targets representing the static libs to be linked by this build rule's corresponding
    // CxxLink rule
    @AddToRuleKey private final ImmutableList<BuildTarget> linkDependencyTargets;

    Impl(
        BuildTarget buildTarget,
        Optional<SourcePath> focusedTargetsList,
        ImmutableList<BuildTarget> linkDependencyTargets) {
      this.output = new OutputPath(OUTPUT_FILENAME);
      this.focusedTargetsList = focusedTargetsList;
      // The rule's own target is matched against the focused patterns too,
      // not just its link dependencies.
      List<BuildTarget> allTargets = new ArrayList<>(linkDependencyTargets);
      allTargets.add(buildTarget);
      this.linkDependencyTargets = ImmutableList.copyOf(allTargets);
    }

    @Override
    public ImmutableList<Step> getBuildSteps(
        BuildContext buildContext,
        ProjectFilesystem filesystem,
        OutputPathResolver outputPathResolver,
        BuildCellRelativePathFactory buildCellPathFactory) {
      ImmutableList.Builder<Step> builder = ImmutableList.builder();

      RelPath outputPath = outputPathResolver.resolvePath(output);
      builder.add(
          new GenerateFilteredFocusedDebugTargetsJson(
              buildContext.getCellPathResolver().getCellNameResolver(),
              filesystem,
              outputPath.getPath(),
              focusedTargetsList.map(buildContext.getSourcePathResolver()::getAbsolutePath),
              linkDependencyTargets));

      return builder.build();
    }
  }

  public CxxFocusedDebugTargets(
      BuildTarget buildTarget,
      ProjectFilesystem projectFilesystem,
      SourcePathRuleFinder ruleFinder,
      Optional<SourcePath> focusedTargetsList,
      ImmutableList<BuildTarget> linkDependencyTargets) {
    super(
        buildTarget,
        projectFilesystem,
        ruleFinder,
        new Impl(buildTarget, focusedTargetsList, linkDependencyTargets));
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return getSourcePath(getBuildable().output);
  }

  /** Writes out focused targets included in the list of given targets to a JSON array. */
  static class GenerateFilteredFocusedDebugTargetsJson extends AbstractExecutionStep {
    private static final BuildTargetMatcherParser<BuildTargetMatcher> buildTargetPatternParser =
        BuildTargetMatcherParser.forVisibilityArgument();

    private final CellNameResolver cellNameResolver;
    private final Path outputRelativePath;
    private final ProjectFilesystem filesystem;
    private final ImmutableList<BuildTarget> targets;
    private final Optional<AbsPath> focusedTargetsList;

    public GenerateFilteredFocusedDebugTargetsJson(
        CellNameResolver cellNameResolver,
        ProjectFilesystem fs,
        Path outputRelativePath,
        Optional<AbsPath> focusedTargetsList,
        ImmutableList<BuildTarget> targets) {
      super("generate " + OUTPUT_FILENAME);
      this.cellNameResolver = cellNameResolver;
      this.filesystem = fs;
      this.outputRelativePath = outputRelativePath;
      this.focusedTargetsList = focusedTargetsList;
      this.targets = targets;
    }

    @Override
    public StepExecutionResult execute(StepExecutionContext context) throws IOException {
      Set<BuildTarget> filteredTargetSet;

      if (focusedTargetsList.isPresent()) {
        // The focused targets file is a JSON object with a "targets" array of
        // build-target pattern strings.
        Map<String, Object> focusedDict =
            ObjectMappers.READER.readValue(
                ObjectMappers.createParser(focusedTargetsList.get().getPath()),
                new TypeReference<LinkedHashMap<String, Object>>() {});

        @SuppressWarnings("unchecked")
        List<String> focusedTargetPatterns = (List<String>) focusedDict.get("targets");

        filteredTargetSet = new HashSet<>();
        for (String focusedPattern : focusedTargetPatterns) {
          filteredTargetSet.addAll(getMatchingFocusedTargets(targets, focusedPattern));
        }
      } else {
        // If no focused targets list is provided, then none of this rule's deps is used for
        // debugging.
        filteredTargetSet = ImmutableSet.of();
      }

      // Ensure we sort the targets to prevent running into rule key differs.
      List<BuildTarget> filteredTargets =
          filteredTargetSet.stream().sorted().collect(Collectors.toList());

      try (OutputStream outputStream = filesystem.newFileOutputStream(outputRelativePath)) {
        try (JsonGenerator jsonGen = ObjectMappers.createGenerator(outputStream)) {
          jsonGen.writeStartArray();
          for (BuildTarget target : filteredTargets) {
            // Unflavored form so the same target matches regardless of flavor.
            jsonGen.writeString(target.getUnflavoredBuildTarget().toString());
          }
          jsonGen.writeEndArray();
        }
      }

      return StepExecutionResults.SUCCESS;
    }

    /** Returns the subset of {@code targets} matched by the given build-target pattern. */
    private Set<BuildTarget> getMatchingFocusedTargets(
        ImmutableList<BuildTarget> targets, String focusedPattern) {
      BuildTargetMatcher targetMatcher =
          buildTargetPatternParser.parse(focusedPattern, cellNameResolver);
      return targets.stream()
          .filter(target -> targetMatcher.matches(target.getUnconfiguredBuildTarget()))
          .collect(Collectors.toSet());
    }
  }
}
/* * ! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com * * ****************************************************************************** * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ***************************************************************************** */ package org.pentaho.di.trans.ael.websocket; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.engine.api.events.PDIEvent; import org.pentaho.di.engine.api.model.Operation; import org.pentaho.di.engine.api.model.Transformation; import org.pentaho.di.engine.api.remote.ExecutionRequest; import org.pentaho.di.engine.api.remote.Message; import org.pentaho.di.engine.api.remote.RemoteSource; import org.pentaho.di.engine.api.remote.StopMessage; import org.pentaho.di.engine.api.reporting.LogEntry; import org.pentaho.di.engine.api.reporting.LogLevel; import org.pentaho.di.engine.api.reporting.Status; import org.pentaho.di.engine.model.ActingPrincipal; import org.pentaho.di.trans.RowProducer; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.ael.adapters.TransMetaConverter; import org.pentaho.di.trans.ael.websocket.exception.HandlerRegistrationException; import org.pentaho.di.trans.ael.websocket.exception.MessageEventHandlerExecutionException; import 
org.pentaho.di.trans.ael.websocket.handler.MessageEventHandler;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaDataCombi;

import java.security.Principal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.function.Function;
import java.util.Collection;

import static java.util.stream.Collectors.toMap;

/**
 * Adapts a PDI {@link Trans} so the transformation is executed by a remote AEL daemon over a
 * WebSocket connection instead of the local engine. Status, log and error events published by
 * the daemon are routed back through {@link MessageEventService} handlers and replayed onto the
 * usual Trans listener / log-channel machinery so callers see a normal Trans lifecycle.
 *
 * Created by fcamara on 8/17/17.
 */
public class TransWebSocketEngineAdapter extends Trans {

  private static final String ANONYMOUS_PRINCIPAL = "anonymous";
  // Handler identifiers; OPERATION_LOG is a prefix completed with the operation id.
  private static final String OPERATION_LOG = "OPERATION_LOG_TRANS_WEBSOCK_";
  private static final String TRANSFORMATION_LOG = "TRANSFORMATION_LOG_TRANS_WEBSOCK";
  private static final String TRANSFORMATION_STATUS = "TRANSFORMATION_STATUS_TRANS_WEBSOCK";
  private static final String TRANSFORMATION_ERROR = "TRANSFORMATION_ERROR_TRANS_WEBSOCK";
  private static final String TRANSFORMATION_STOP = "TRANSFORMATION_STOP_TRANS_WEBSOCK";

  private final Transformation transformation;
  private ExecutionRequest executionRequest;
  // Lazily created in getDaemonEndpoint(); null until the first message is sent.
  private DaemonMessagesClientEndpoint daemonMessagesClientEndpoint = null;
  private final MessageEventService messageEventService;
  private LogLevel logLevel = null;
  private final String host;
  private final String port;
  private final boolean ssl;

  // Completion signal used to wait until the Transformation is finished.
  private CountDownLatch transFinishedSignal = new CountDownLatch( 1 );

  // Maps PDI log levels onto the AEL engine's LogLevel; ROWLEVEL maps to TRACE.
  private static final Map<org.pentaho.di.core.logging.LogLevel, LogLevel> LEVEL_MAP = new HashMap<>();

  static {
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.BASIC, LogLevel.BASIC );
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.DEBUG, LogLevel.DEBUG );
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.DETAILED, LogLevel.DETAILED );
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.ERROR, LogLevel.ERROR );
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.MINIMAL, LogLevel.MINIMAL );
    LEVEL_MAP.put( org.pentaho.di.core.logging.LogLevel.ROWLEVEL, LogLevel.TRACE );
  }

  /**
   * @param transMeta transformation definition; converted once to the AEL model.
   * @param host      daemon host name.
   * @param port      daemon port.
   * @param ssl       whether to connect over wss.
   */
  public TransWebSocketEngineAdapter( TransMeta transMeta, String host, String port, boolean ssl ) {
    transformation = TransMetaConverter.convert( transMeta );
    this.transMeta = transMeta;
    this.messageEventService = new MessageEventService();
    this.host = host;
    this.port = port;
    this.ssl = ssl;
  }

  /**
   * Lazily creates the client endpoint on first use.
   * Fix: dropped the original {@code catch (KettleException e) { throw e; }} wrapper,
   * which rethrew the same exception and added nothing.
   */
  private DaemonMessagesClientEndpoint getDaemonEndpoint() throws KettleException {
    if ( daemonMessagesClientEndpoint == null ) {
      daemonMessagesClientEndpoint = new DaemonMessagesClientEndpoint( host, port, ssl, messageEventService );
    }
    return daemonMessagesClientEndpoint;
  }

  /** Translates the PDI log level; unknown levels fall back to MINIMAL. */
  @Override public void setLogLevel( org.pentaho.di.core.logging.LogLevel logLogLevel ) {
    this.logLevel = LEVEL_MAP.getOrDefault( logLogLevel, LogLevel.MINIMAL );
  }

  @Override public void killAll() {
    throw new UnsupportedOperationException( "Not yet implemented" );
  }

  /** Asks the daemon to stop; errors are logged rather than propagated. */
  @Override public void stopAll() {
    try {
      getDaemonEndpoint().sendMessage( new StopMessage( "User Request" ) );
    } catch ( KettleException e ) {
      getLogChannel().logError( "Error finalizing the transformation", e );
    }
  }

  /**
   * Builds the execution request (environment snapshot + converted transformation),
   * wires all message handlers and marks the trans ready to start.
   */
  @Override public void prepareExecution( String[] arguments ) throws KettleException {
    activateParameters();
    transMeta.activateParameters();
    transMeta.setInternalKettleVariables();

    // Snapshot every variable so the daemon sees the same environment.
    Map<String, Object> env = Arrays.stream( transMeta.listVariables() )
      .collect( toMap( Function.identity(), transMeta::getVariable ) );
    this.executionRequest = new ExecutionRequest( new HashMap<>(), env, transformation, new HashMap<>(), logLevel,
      getActingPrincipal( transMeta ) );

    setSteps( new ArrayList<>( opsToSteps() ) );
    wireStatusToTransListeners();
    subscribeToOpLogging();
    subscribeToTransLogging();

    setReadyToStart( true );
  }

  /** Replays a remote log entry on the given channel at the matching level. */
  private void logToChannel( LogChannelInterface logChannel, LogEntry data ) {
    LogLevel logLogLevel = data.getLogLogLevel();
    switch ( logLogLevel ) {
      case ERROR:
        logChannel.logError( data.getMessage() );
        break;
      case MINIMAL:
        logChannel.logMinimal( data.getMessage() );
        break;
      case BASIC:
        logChannel.logBasic( data.getMessage() );
        break;
      case DETAILED:
        logChannel.logDetailed( data.getMessage() );
        break;
      case DEBUG:
        logChannel.logDebug( data.getMessage() );
        break;
      case TRACE:
        logChannel.logRowlevel( data.getMessage() );
        break;
      // Other levels (if any) are intentionally ignored.
    }
  }

  /**
   * Registers one log handler per operation; entries are logged on the step's own
   * channel when the step can be found, otherwise on the transformation channel.
   */
  private void subscribeToOpLogging() throws KettleException {
    transformation.getOperations().stream().forEach( operation -> {
      try {
        messageEventService.addHandler( Util.getOperationLogEvent( operation.getId() ),
          new MessageEventHandler() {
            @Override
            public void execute( Message message ) throws MessageEventHandlerExecutionException {
              PDIEvent<RemoteSource, LogEntry> event = (PDIEvent<RemoteSource, LogEntry>) message;
              LogEntry logEntry = event.getData();
              StepInterface stepInterface = findStepInterface( operation.getId(), 0 );
              if ( stepInterface != null ) {
                LogChannelInterface logChannel = stepInterface.getLogChannel();
                logToChannel( logChannel, logEntry );
              } else {
                // Could not find step, log at transformation level instead
                logToChannel( getLogChannel(), logEntry );
              }
            }

            @Override
            public String getIdentifier() {
              return OPERATION_LOG + operation.getId();
            }
          } );
      } catch ( HandlerRegistrationException e ) {
        getLogChannel().logError( "Error registering message handlers", e );
      }
    } );
  }

  /** Routes transformation-level log events onto this trans' log channel. */
  private void subscribeToTransLogging() throws KettleException {
    messageEventService.addHandler( Util.getTransformationLogEvent(),
      new MessageEventHandler() {
        @Override
        public void execute( Message message ) throws MessageEventHandlerExecutionException {
          PDIEvent<RemoteSource, LogEntry> event = (PDIEvent<RemoteSource, LogEntry>) message;
          LogEntry data = event.getData();
          logToChannel( getLogChannel(), data );
        }

        @Override
        public String getIdentifier() {
          return TRANSFORMATION_LOG;
        }
      } );
  }

  /**
   * Registers handlers for remote status, error and stop messages and converts them
   * into the standard TransListener callbacks (transStarted/transActive/transFinished).
   */
  private void wireStatusToTransListeners() throws KettleException {
    messageEventService.addHandler( Util.getTransformationStatusEvent(),
      new MessageEventHandler() {
        @Override
        public void execute( Message message ) throws MessageEventHandlerExecutionException {
          PDIEvent<RemoteSource, Status> transStatusEvent = (PDIEvent<RemoteSource, Status>) message;
          addStepPerformanceSnapShot();
          getTransListeners().forEach( l -> {
            try {
              switch ( transStatusEvent.getData() ) {
                case RUNNING:
                  l.transStarted( TransWebSocketEngineAdapter.this );
                  l.transActive( TransWebSocketEngineAdapter.this );
                  break;
                case PAUSED:
                  break;
                case STOPPED:
                  break;
                case FAILED:
                case FINISHED:
                  l.transFinished( TransWebSocketEngineAdapter.this );
                  setFinished( true );
                  break;
              }
            } catch ( KettleException e ) {
              throw new RuntimeException( e );
            }
          } );
        }

        @Override
        public String getIdentifier() {
          return TRANSFORMATION_STATUS;
        }
      } );

    messageEventService
      .addHandler( Util.getTransformationErrorEvent(), new MessageEventHandler() {
        @Override
        public void execute( Message message ) throws MessageEventHandlerExecutionException {
          Throwable throwable = ( (PDIEvent<RemoteSource, LogEntry>) message ).getData().getThrowable();
          getLogChannel().logError( "Error Executing Transformation", throwable );
          setFinished( true );
          // emit error on all steps
          getSteps().stream().map( stepMetaDataCombi -> stepMetaDataCombi.step ).forEach( step -> {
            step.setStopped( true );
            step.setRunning( false );
          } );
          getTransListeners().forEach( l -> {
            try {
              l.transFinished( TransWebSocketEngineAdapter.this );
            } catch ( KettleException e ) {
              getLogChannel().logError( "Error notifying trans listener", e );
            }
          } );
        }

        @Override
        public String getIdentifier() {
          return TRANSFORMATION_ERROR;
        }
      } );

    messageEventService
      .addHandler( Util.getStopMessage(), new MessageEventHandler() {
        @Override
        public void execute( Message message ) throws MessageEventHandlerExecutionException {
          setFinished( true );
          getTransListeners().forEach( l -> {
            try {
              l.transFinished( TransWebSocketEngineAdapter.this );
            } catch ( KettleException e ) {
              getLogChannel().logError( "Error notifying trans listener", e );
            }
          } );
          try {
            getDaemonEndpoint().close();
          } catch ( KettleException e ) {
            getLogChannel().logError( "Error finalizing", e );
          }
          // Signal for the waitUntilFinished blocker...
          transFinishedSignal.countDown();
        }

        @Override
        public String getIdentifier() {
          return TRANSFORMATION_STOP;
        }
      } );
  }

  /**
   * Converts each AEL operation into a StepMetaDataCombi backed by the websocket step
   * adapters. Fix: adapter construction failures are now reported on the trans log
   * channel instead of being swallowed with printStackTrace().
   */
  private Collection<StepMetaDataCombi> opsToSteps() {
    Map<Operation, StepMetaDataCombi> operationToCombi = transformation.getOperations().stream()
      .collect( toMap( Function.identity(), op -> {
        StepMetaDataCombi combi = new StepMetaDataCombi();
        combi.stepMeta = StepMeta.fromXml( (String) op.getConfig().get( TransMetaConverter.STEP_META_CONF_KEY ) );
        try {
          combi.data = new StepDataInterfaceWebSocketEngineAdapter( op, messageEventService );
          combi.step =
            new StepInterfaceWebSocketEngineAdapter( op, messageEventService, combi.stepMeta, transMeta,
              combi.data, this );
        } catch ( KettleException e ) {
          // Surface the failure on the log channel; the combi is still returned so the
          // step list stays aligned with the operations (same behavior as before).
          getLogChannel().logError( "Error creating step adapters for operation " + op.getId(), e );
        }
        combi.meta = combi.stepMeta.getStepMetaInterface();
        combi.stepname = combi.stepMeta.getName();
        return combi;
      } ) );
    return operationToCombi.values();
  }

  /** Sends the prepared execution request to the daemon; execution is remote. */
  @Override public void startThreads() throws KettleException {
    getDaemonEndpoint().sendMessage( executionRequest );
  }

  /** Blocks until the stop handler counts down the finish latch. */
  @Override public void waitUntilFinished() {
    try {
      transFinishedSignal.await();
    } catch ( InterruptedException e ) {
      throw new RuntimeException( "Waiting for transformation to be finished interrupted!", e );
    }
  }

  // ======================== May want to implement ================================= //

  @Override public RowProducer addRowProducer( String stepname, int copynr ) throws KettleException {
    throw new UnsupportedOperationException( "Not yet implemented" );
  }

  /** Resolves the acting principal from the repository user, or "anonymous". */
  private Principal getActingPrincipal( TransMeta transMeta ) {
    if ( transMeta.getRepository() == null || transMeta.getRepository().getUserInfo() == null ) {
      return new ActingPrincipal( ANONYMOUS_PRINCIPAL );
    }
    return new ActingPrincipal( transMeta.getRepository().getUserInfo().getName() );
  }
}
package ar.com.tzulberti.archerytraining.database;

import android.database.sqlite.SQLiteDatabase;

import ar.com.tzulberti.archerytraining.model.bow.Bow;
import ar.com.tzulberti.archerytraining.model.bow.SightDistanceValue;
import ar.com.tzulberti.archerytraining.model.constrains.RoundConstraint;
import ar.com.tzulberti.archerytraining.model.constrains.TournamentConstraint;
import ar.com.tzulberti.archerytraining.model.playoff.ComputerPlayOffConfiguration;
import ar.com.tzulberti.archerytraining.model.playoff.HumanPlayoffConfiguration;
import ar.com.tzulberti.archerytraining.model.playoff.Playoff;
import ar.com.tzulberti.archerytraining.model.playoff.PlayoffSerie;
import ar.com.tzulberti.archerytraining.model.playoff.PlayoffSerieArrow;
import ar.com.tzulberti.archerytraining.model.series.SerieData;
import ar.com.tzulberti.archerytraining.model.tournament.Tournament;
import ar.com.tzulberti.archerytraining.model.tournament.TournamentSerie;
import ar.com.tzulberti.archerytraining.model.tournament.TournamentSerieArrow;

/**
 * Creates every application table when the database is provisioned for the first time.
 *
 * No migration is executed here: this class always builds the latest schema from
 * scratch. Because of that it must NOT be reused from database migrations to create
 * the tables incrementally — migrations have their own DDL.
 *
 * Table and column names come from constants on the model classes so the DDL stays
 * in sync with the accessors used elsewhere.
 *
 * Created by tzulberti on 6/3/17.
 */
class TablesCreator {

    /**
     * Creates all tables. Order matters: referenced tables (constraints, tournaments,
     * playoffs, bows) are created before the tables holding foreign keys to them.
     */
    void createAll(SQLiteDatabase db) {
        this.createRoundConstraintTable(db);
        this.createTournamentConstraintTable(db);
        this.createSeriesTable(db);
        this.createTournamentTable(db);
        this.createTournamentSerieTable(db);
        this.createTournamentSerieArrowTable(db);
        this.createPlayoffTable(db);
        this.createPlayoffSerieTable(db);
        this.createPlayoffSerieArrowTable(db);
        this.createComputerPlayoffTable(db);
        this.createHumanPlayoffTable(db);
        this.createBowTable(db);
        this.createSightValueTable(db);
    }

    // One round of a tournament format: distance, series/arrow counts and score range.
    private void createRoundConstraintTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + RoundConstraint.TABLE_NAME + " (" +
                RoundConstraint.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                RoundConstraint.DISTANCE_COLUMN_NAME + " INTEGER NOT NULL, " +
                RoundConstraint.SERIES_PER_ROUND_COLUMN_NAME + " INTEGER NOT NULL, " +
                RoundConstraint.ARROWS_PER_SERIES_COLUMN_NAME + " INTEGER NOT NULL, " +
                RoundConstraint.MIN_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                RoundConstraint.MAX_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                RoundConstraint.TARGET_IMAGE_COLUMN_NAME + " TEXT NOT NULL " +
            ")"
        );
    }

    // A tournament format: up to 6 round constraints; rounds 2..6 are optional (nullable FKs).
    private void createTournamentConstraintTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + TournamentConstraint.TABLE_NAME + " (" +
                TournamentConstraint.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                TournamentConstraint.NAME_COLUMN_NAME + " TEXT NOT NULL, " +
                TournamentConstraint.IS_OUTDOOR_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentConstraint.STRING_XML_KEY_COLUMN_NAME + " TEXT NOT NULL, " +
                TournamentConstraint.ROUND_CONSTRAINT_1_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentConstraint.ROUND_CONSTRAINT_2_ID_COLUMN_NAME + " INTEGER, " +
                TournamentConstraint.ROUND_CONSTRAINT_3_ID_COLUMN_NAME + " INTEGER, " +
                TournamentConstraint.ROUND_CONSTRAINT_4_ID_COLUMN_NAME + " INTEGER, " +
                TournamentConstraint.ROUND_CONSTRAINT_5_ID_COLUMN_NAME + " INTEGER, " +
                TournamentConstraint.ROUND_CONSTRAINT_6_ID_COLUMN_NAME + " INTEGER, " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_1_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_2_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_3_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_4_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_5_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentConstraint.ROUND_CONSTRAINT_6_ID_COLUMN_NAME + ") REFERENCES " + RoundConstraint.TABLE_NAME + " ( " + RoundConstraint.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // Free-practice series log. NOTE(review): "LONG" is not a standard SQLite type name;
    // it works via type affinity (NUMERIC) — confirm intended (same in the tournament/playoff tables).
    private void createSeriesTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + SerieData.TABLE_NAME + "( " +
                SerieData.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                SerieData.DATETIME_COLUMN_NAME + " LONG NOT NULL, " +
                SerieData.DISTANCE_COLUMN_NAME + " INTEGER NOT NULL, " +
                SerieData.ARROWS_AMOUNT_COLUMN_NAME + " INTEGER NOT NULL, " +
                SerieData.TRAINING_TYPE_COLUMN_NAME + " INTEGER NOT NULL, " +
                SerieData.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0 " +
            ");"
        );
    }

    // A tournament instance played against a given tournament constraint (format).
    private void createTournamentTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + Tournament.TABLE_NAME + "( " +
                Tournament.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                Tournament.NAME_COLUMN_NAME + " TEXT NOT NULL, " +
                Tournament.DATETIME_COLUMN_NAME + " LONG NOT NULL, " +
                Tournament.IS_TOURNAMENT_DATA_COLUMN_NAME + " INTEGER NOT NULL, " +
                Tournament.TOTAL_SCORE_COLUMN_NAME + " INTEGER NOT NULL DEFAULT 0," +
                Tournament.TOURNAMENT_CONSTRAINT_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                Tournament.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + Tournament.TOURNAMENT_CONSTRAINT_ID_COLUMN_NAME + ") REFERENCES " + TournamentConstraint.TABLE_NAME + " ( " + TournamentConstraint.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // One series inside a tournament; (serie_index, tournament_id) is unique.
    private void createTournamentSerieTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + TournamentSerie.TABLE_NAME + " (" +
                TournamentSerie.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                TournamentSerie.TOURNAMENT_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerie.SERIE_INDEX_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerie.TOTAL_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerie.ROUND_INDEX_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerie.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + TournamentSerie.TOURNAMENT_ID_COLUMN_NAME + ") REFERENCES " + Tournament.TABLE_NAME + " ( " + Tournament.ID_COLUMN_NAME + " ), " +
                "CONSTRAINT unq_serie_index_tournament_id UNIQUE (" + TournamentSerie.SERIE_INDEX_COLUMN_NAME + ", " + TournamentSerie.TOURNAMENT_ID_COLUMN_NAME + ")" +
            ");"
        );
    }

    // One arrow of a tournament series with its score and impact position.
    // NOTE(review): the second FK references TournamentSerie's id via the SERIE_INDEX
    // column — looks like it stores the serie row id despite the name; confirm.
    private void createTournamentSerieArrowTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + TournamentSerieArrow.TABLE_NAME + " (" +
                TournamentSerieArrow.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                TournamentSerieArrow.TOURNAMENT_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerieArrow.SERIE_INDEX_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerieArrow.SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                TournamentSerieArrow.X_POSITION_COLUMN_NAME + " REAL NOT NULL, " +
                TournamentSerieArrow.Y_POSITION_COLUMN_NAME + " REAL NOT NULL, " +
                TournamentSerieArrow.IS_X_COLUMN_NAME + " INTEGER NOT NULL DEFAULT 0, " +
                TournamentSerieArrow.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + TournamentSerieArrow.TOURNAMENT_ID_COLUMN_NAME + ") REFERENCES " + Tournament.TABLE_NAME + " ( " + Tournament.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + TournamentSerieArrow.SERIE_INDEX_COLUMN_NAME + ") REFERENCES " + TournamentSerie.TABLE_NAME + " ( " + TournamentSerie.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // A playoff match; running user/opponent totals default to 0.
    private void createPlayoffTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + Playoff.TABLE_NAME + " (" +
                Playoff.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                Playoff.DATETIME_COLUMN_NAME + " LONG NOT NULL, " +
                Playoff.NAME_COLUMN_NAME + " TEXT NOT NULL, " +
                Playoff.OPPONENT_PLAYOFF_SCORE_COLUMN_NAME + " INTEGER NOT NULL DEFAULT 0, " +
                Playoff.USER_PLAYOFF_SCORE_COLUMN_NAME + " INTEGER NOT NULL DEFAULT 0, " +
                Playoff.TOURNAMENT_CONSTRAINT_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                Playoff.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + Playoff.TOURNAMENT_CONSTRAINT_ID_COLUMN_NAME + ") REFERENCES " + TournamentConstraint.TABLE_NAME + " ( " + TournamentConstraint.ID_COLUMN_NAME + " ) " +
            ")"
        );
    }

    // One series of a playoff with both players' totals for that series.
    private void createPlayoffSerieTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + PlayoffSerie.TABLE_NAME + " (" +
                PlayoffSerie.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                PlayoffSerie.SERIE_INDEX_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerie.PLAYOFF_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerie.OPPONENT_TOTAL_SCORE_COLUMN_NAME_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerie.USER_TOTAL_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerie.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + PlayoffSerie.PLAYOFF_ID_COLUMN_NAME + ") REFERENCES " + Playoff.TABLE_NAME + " ( " + Playoff.ID_COLUMN_NAME + " ) " +
            ")"
        );
    }

    // One arrow of a playoff series (user's arrows only), with impact position.
    private void createPlayoffSerieArrowTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + PlayoffSerieArrow.TABLE_NAME + " (" +
                PlayoffSerieArrow.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                PlayoffSerieArrow.PLAYOFF_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerieArrow.SERIE_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerieArrow.SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                PlayoffSerieArrow.X_POSITION_COLUMN_NAME + " REAL NOT NULL, " +
                PlayoffSerieArrow.Y_POSITION_COLUMN_NAME + " REAL NOT NULL, " +
                PlayoffSerieArrow.IS_X_COLUMN_NAME + " INTEGER NOT NULL DEFAULT 0, " +
                PlayoffSerieArrow.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + PlayoffSerieArrow.PLAYOFF_ID_COLUMN_NAME + ") REFERENCES " + Playoff.TABLE_NAME + " ( " + Playoff.ID_COLUMN_NAME + " ), " +
                "FOREIGN KEY (" + PlayoffSerieArrow.SERIE_ID_COLUMN_NAME + ") REFERENCES " + PlayoffSerie.TABLE_NAME + " ( " + PlayoffSerie.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // Configuration for a playoff against a simulated (computer) opponent.
    private void createComputerPlayoffTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + ComputerPlayOffConfiguration.TABLE_NAME + " (" +
                ComputerPlayOffConfiguration.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                ComputerPlayOffConfiguration.PLAYOFF_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                ComputerPlayOffConfiguration.MIN_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                ComputerPlayOffConfiguration.MAX_SCORE_COLUMN_NAME + " INTEGER NOT NULL, " +
                ComputerPlayOffConfiguration.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + ComputerPlayOffConfiguration.PLAYOFF_ID_COLUMN_NAME + ") REFERENCES " + Playoff.TABLE_NAME + " ( " + Playoff.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // Configuration for a playoff against a named human opponent.
    private void createHumanPlayoffTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + HumanPlayoffConfiguration.TABLE_NAME + " (" +
                HumanPlayoffConfiguration.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                HumanPlayoffConfiguration.PLAYOFF_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                HumanPlayoffConfiguration.OPPONENT_NAME_COLUMN_NAME + " TEXT NOT NULL, " +
                HumanPlayoffConfiguration.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + HumanPlayoffConfiguration.PLAYOFF_ID_COLUMN_NAME + ") REFERENCES " + Playoff.TABLE_NAME + " ( " + Playoff.ID_COLUMN_NAME + " ) " +
            ");"
        );
    }

    // A bow owned by the archer.
    private void createBowTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + Bow.TABLE_NAME + " (" +
                Bow.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                Bow.NAME_COLUMN_NAME + " TEXT NOT NULL, " +
                Bow.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0 " +
            ")"
        );
    }

    // Sight setting (value) for a given bow at a given distance.
    private void createSightValueTable(SQLiteDatabase db) {
        db.execSQL(
            "CREATE TABLE " + SightDistanceValue.TABLE_NAME + " (" +
                SightDistanceValue.ID_COLUMN_NAME + " INTEGER PRIMARY KEY AUTOINCREMENT, " +
                SightDistanceValue.BOW_ID_COLUMN_NAME + " INTEGER NOT NULL, " +
                SightDistanceValue.DISTANCE_COLUMN_NAME + " INTEGER NOT NULL, " +
                SightDistanceValue.SIGHT_VALUE_COLUMN_NAME + " FLOAT NOT NULL, " +
                SightDistanceValue.IS_SYNCED + " INTEGER NOT NULL DEFAULT 0, " +
                "FOREIGN KEY (" + SightDistanceValue.BOW_ID_COLUMN_NAME + ") REFERENCES " + Bow.TABLE_NAME + " ( " + Bow.ID_COLUMN_NAME + " ) " +
            ")"
        );
    }
}
package com.codepath.apps.restclienttemplate.fragments;

import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;

import com.codepath.apps.restclienttemplate.R;
import com.codepath.apps.restclienttemplate.TweetAdapter;
import com.codepath.apps.restclienttemplate.TwitterApplication;
import com.codepath.apps.restclienttemplate.TwitterClient;
import com.codepath.apps.restclienttemplate.models.Tweet;
import com.loopj.android.http.JsonHttpResponseHandler;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;

import cz.msebera.android.httpclient.Header;

/**
 * Fragment that shows the results of a Twitter search in a RecyclerView, with
 * pull-to-refresh support. The query string is passed via fragment arguments
 * (use {@link #newInstance(String)}).
 *
 * Created by veviego on 7/7/17.
 */
public class SearchTweetsFragment extends Fragment {
    private TwitterClient client;
    public TweetAdapter tweetAdapter;
    public ArrayList<Tweet> tweets;
    public RecyclerView rvTweets;
    public SwipeRefreshLayout swipeContainer;
    String query;

    /**
     * Factory method; packs the search query into the fragment's arguments so it
     * survives fragment re-creation.
     */
    public static SearchTweetsFragment newInstance(String query) {
        SearchTweetsFragment searchTweetsFragment = new SearchTweetsFragment();
        Bundle args = new Bundle();
        args.putString("query", query);
        searchTweetsFragment.setArguments(args);
        return searchTweetsFragment;
    }

    // Inflation happens in onCreateView
    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        // Inflate the layout
        View v = inflater.inflate(R.layout.fragments_tweet_list, container, false);

        client = TwitterApplication.getRestClient();

        // find the RecyclerView
        rvTweets = (RecyclerView) v.findViewById(R.id.rvTweet);
        rvTweets.addItemDecoration(new com.codepath.apps.restclienttemplate.DividerItemDecoration(getContext()));
        // init the ArrayList (data source)
        tweets = new ArrayList<Tweet>();
        // construct the adapter from this data source
        tweetAdapter = new TweetAdapter(tweets);
        // RecyclerView setup (layout manager, use adapter)
        rvTweets.setLayoutManager(new LinearLayoutManager(getContext()));
        rvTweets.setAdapter(tweetAdapter);

        query = getArguments().getString("query");

        // Lookup the swipe container view
        swipeContainer = (SwipeRefreshLayout) v.findViewById(R.id.swipeContainer);
        // Setup refresh listener which triggers new data loading
        swipeContainer.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Re-run the search; fetchTimelineAsync clears the spinner when done.
                fetchTimelineAsync(0);
            }
        });
        // Configure the refreshing colors
        swipeContainer.setColorSchemeResources(android.R.color.holo_blue_bright,
                android.R.color.holo_green_light,
                android.R.color.holo_orange_light,
                android.R.color.holo_red_light);

        populateTimeline();

        return v;
    }

    /**
     * Refresh handler: re-runs the search, replaces the adapter contents with the
     * fresh results and stops the refresh spinner.
     *
     * @param page unused; kept for interface compatibility with callers.
     */
    public void fetchTimelineAsync(int page) {
        client.searchTweets(query, new JsonHttpResponseHandler() {
            @Override
            public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
                // Remember to CLEAR OUT old items before appending in the new ones
                tweetAdapter.clear();
                JSONArray statuses = extractStatuses(response);
                // Fix: bail out instead of dereferencing a null array (previous code
                // NPE'd on statuses.length() when the "statuses" key was missing).
                if (statuses == null) {
                    swipeContainer.setRefreshing(false);
                    return;
                }
                for (int i = 0; i < statuses.length(); i++) {
                    try {
                        // convert each object to a tweet model
                        Tweet tweet = Tweet.fromJSON(statuses.getJSONObject(i));
                        // add that tweet model to our data source
                        tweets.add(tweet);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
                tweetAdapter.addAll(tweets);
                // Now we call setRefreshing(false) to signal refresh has finished
                swipeContainer.setRefreshing(false);
            }

            @Override // Fix: was missing, so a signature drift would silently no-op.
            public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject errorResponse) {
                Log.d("DEBUG", "Fetch timeline error: " + errorResponse.toString());
                // Fix: also stop the spinner on failure so it doesn't spin forever.
                swipeContainer.setRefreshing(false);
            }
        });
    }

    /** Runs the initial search and appends the results to the (empty) adapter. */
    public void populateTimeline() {
        client.searchTweets(query, new JsonHttpResponseHandler() {
            @Override
            public void onSuccess(int statusCode, Header[] headers, JSONArray response) {
                super.onSuccess(statusCode, headers, response);
                Toast.makeText(getContext(), "what", Toast.LENGTH_LONG).show();
            }

            @Override
            public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
                JSONArray statuses = extractStatuses(response);
                // Fix: guard against a missing "statuses" array (previously NPE'd on
                // statuses.toString() / statuses.length()).
                if (statuses == null) {
                    return;
                }
                Log.e("search", statuses.toString());
                for (int i = 0; i < statuses.length(); i++) {
                    try {
                        // convert each object to a tweet model
                        Tweet tweet = Tweet.fromJSON(statuses.getJSONObject(i));
                        // add that tweet model to our data source
                        tweets.add(tweet);
                        // notify the adapter that we've added an item
                        tweetAdapter.notifyItemInserted(tweets.size() - 1);
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                }
            }

            @Override
            public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject errorResponse) {
                Log.d("TwitterClient", errorResponse.toString());
                throwable.printStackTrace();
            }

            @Override
            public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONArray errorResponse) {
                Log.d("TwitterClient", errorResponse.toString());
                throwable.printStackTrace();
            }

            @Override
            public void onFailure(int statusCode, Header[] headers, String responseString, Throwable throwable) {
                Log.d("TwitterClient", responseString);
                throwable.printStackTrace();
            }
        });
    }

    /**
     * Pulls the "statuses" array out of a search response; returns null (and logs)
     * when the response is malformed. Shared by both response handlers.
     */
    private JSONArray extractStatuses(JSONObject response) {
        try {
            return response.getJSONArray("statuses");
        } catch (JSONException e) {
            Log.d("TwitterClient", "Malformed search response: missing 'statuses'", e);
            return null;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pig.impl.logicalLayer;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.io.IOException;

import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.parser.ParseException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.plan.Operator;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.ProjectionMap;
import org.apache.pig.impl.plan.RequiredFields;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.util.Pair;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Parent for all Logical operators.
 */
abstract public class LogicalOperator extends Operator<LOVisitor> {
    private static final long serialVersionUID = 2L;

    /**
     * Schema that defines the output of this operator.
     */
    protected Schema mSchema = null;

    /**
     * A boolean variable to remember if the schema has been computed
     */
    protected boolean mIsSchemaComputed = false;

    /**
     * Datatype of this output of this operator. Operators start out with data
     * type set to UNKNOWN, and have it set for them by the type checker.
     */
    protected byte mType = DataType.UNKNOWN;

    /**
     * Requested level of parallelism for this operation.
     */
    protected int mRequestedParallelism;

    /**
     * Name of the record set that results from this operator.
     */
    protected String mAlias;

    /**
     * Logical plan that this operator is a part of.
     */
    protected LogicalPlan mPlan;

    /**
     * ProjectionMap of this operator.
     */
    protected ProjectionMap mProjectionMap;

    /**
     * A boolean variable to remember if the projection map has been computed
     */
    protected boolean mIsProjectionMapComputed = false;

    /**
     * A HashSet to indicate whether an option (such a Join Type) was pinned
     * by the user or can be chosen at runtime by the optimizer.
     */
    protected HashSet<Integer> mPinnedOptions = new HashSet<Integer>();

    /**
     * Name of the customPartitioner if one is used, this is set to null otherwise.
     */
    protected String mCustomPartitioner = null;

    public String getCustomPartitioner() {
        return mCustomPartitioner;
    }

    public void setCustomPartitioner(String customPartitioner) {
        this.mCustomPartitioner = customPartitioner;
    }

    private static Log log = LogFactory.getLog(LogicalOperator.class);

    /**
     * Equivalent to LogicalOperator(k, 0).
     *
     * @param plan
     *            Logical plan this operator is a part of.
     * @param k
     *            Operator key to assign to this node.
     */
    public LogicalOperator(LogicalPlan plan, OperatorKey k) {
        this(plan, k, -1);
    }

    /**
     * @param plan
     *            Logical plan this operator is a part of.
     * @param k Operator key to assign to this node.
     * @param rp degree of requested parallelism with which to execute this
     *            node.
     */
    public LogicalOperator(LogicalPlan plan, OperatorKey k, int rp) {
        super(k);
        mPlan = plan;
        mRequestedParallelism = rp;
    }

    /**
     * Get the operator key for this operator.
     */
    public OperatorKey getOperatorKey() {
        return mKey;
    }

    /**
     * Set the output schema for this operator. If a schema already exists, an
     * attempt will be made to reconcile it with this new schema.
     *
     * @param schema
     *            Schema to set.
     * @throws FrontendException
     *             if there is already a schema and the existing schema cannot
     *             be reconciled with this new schema.
     */
    // Fix: javadoc previously claimed @throws ParseException, but the method
    // declares FrontendException.
    public void setSchema(Schema schema) throws FrontendException {
        // In general, operators don't generate their schema until they're
        // asked, so ask them to do it.
        try {
            getSchema();
        } catch (FrontendException ioe) {
            // It's fine, it just means we don't have a schema yet.
        }
        if (mSchema == null) {
            mSchema = schema;
        } else {
            mSchema.reconcile(schema);
        }
    }

    /**
     * Set the parent of the schema field in the schema hierarchy. Currently only used by
     * LOStream and LOLoad.
     *
     * @param schema the schema instance to set parent for
     */
    protected final void setParent(Schema schema) {
        if( schema == null )
            return;

        for( Schema.FieldSchema fs : schema.getFields() ) {
            fs.setParent( null, this );
            setParent( fs.schema );
        }
    }

    /**
     * Directly force the schema without reconcilation
     * Please use with great care
     * @param schema
     */
    public void forceSchema(Schema schema) {
        this.mSchema = schema;
    }

    /**
     * Unset the schema as if it had not been calculated. This is used by
     * anyone who reorganizes the tree and needs to have schemas recalculated.
     */
    public void unsetSchema() throws VisitorException {
        mIsSchemaComputed = false;
        mSchema = null;
    }

    /**
     * Regenerate the schema by unsetting and getting the schema
     */
    public Schema regenerateSchema() throws FrontendException, VisitorException {
        unsetSchema();
        return getSchema();
    }

    /**
     * Calculate canonical names for all fields in the schema. This should
     * only be used for loads or other operators that create all new fields.
     */
    public void setCanonicalNames() {
        for (Schema.FieldSchema fs : mSchema.getFields()) {
            fs.canonicalName = CanonicalNamer.getNewName();
        }
    }

    /**
     * Get a copy of the schema for the output of this operator.
     */
    public abstract Schema getSchema() throws FrontendException;

    /**
     * Set the type of this operator. This should only be called by the type
     * checking routines.
     *
     * @param t
     *            Type to set this operator to.
     */
    final public void setType(byte t) {
        mType = t;
    }

    /**
     * Get the type of this operator.
     */
    public byte getType() {
        return mType;
    }

    public String getAlias() {
        return mAlias;
    }

    /** Alias formatted for log/plan output: "alias: " or "" when unset. */
    public String getAliasString() {
        return (mAlias == null) ? "" : (mAlias + ": ");
    }

    public void setAlias(String newAlias) {
        mAlias = newAlias;
    }

    public int getRequestedParallelism() {
        return mRequestedParallelism;
    }

    public void setRequestedParallelism(int newRequestedParallelism) {
        mRequestedParallelism = newRequestedParallelism;
    }

    public void pinOption(Integer opt) {
        mPinnedOptions.add(opt);
    }

    public boolean isPinnedOption(Integer opt) {
        return mPinnedOptions.contains(opt);
    }

    @Override
    public String toString() {
        // Fix: StringBuilder instead of the synchronized StringBuffer; no
        // concurrent access here.
        StringBuilder msg = new StringBuilder();
        msg.append("(Name: " + name() + " Operator Key: " + mKey + ")");
        return msg.toString();
    }

    /**
     * Given a schema, reconcile it with our existing schema.
     *
     * @param schema
     *            Schema to reconcile with the existing.
     * @throws ParseException
     *             if the reconciliation is not possible.
     */
    protected void reconcileSchema(Schema schema) throws ParseException {
        if (mSchema == null) {
            mSchema = schema;
            return;
        }
        // TODO
    }

    /**
     * Visit this node with the provided visitor. This should only be called by
     * the visitor class itself, never directly.
     *
     * @param v
     *            Visitor to visit with.
     * @throws VisitorException
     *             if the visitor has a problem.
     */
    // Fix: javadoc typo — was "@throws VisitException".
    public abstract void visit(LOVisitor v) throws VisitorException;

    public LogicalPlan getPlan() {
        return mPlan ;
    }

    /**
     * Change the reference to the plan for this operator. Don't use this
     * unless you're sure you know what you're doing.
     */
    public void setPlan(LogicalPlan plan) {
        mPlan = plan;
    }

    /***
     * IMPORTANT:
     * This method is only used for unit testing purpose.
     */
    public void setSchemaComputed(boolean computed) {
        mIsSchemaComputed = computed ;
    }

    @Override
    public boolean supportsMultipleOutputs() {
        return true;
    }

    /**
     * @see org.apache.pig.impl.plan.Operator#clone()
     * Do not use the clone method directly. Operators are cloned when logical plans
     * are cloned using {@link LogicalPlanCloner}
     */
    @Override
    protected Object clone() throws CloneNotSupportedException {
        LogicalOperator loClone = (LogicalOperator)super.clone();
        if(mSchema != null)
            loClone.mSchema = this.mSchema.clone();
        return loClone;
    }
}
/*
 * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.opsworks.model;

import java.io.Serializable;
import java.util.Objects;

/**
 * <p>
 * Contains the data needed by RDP clients such as the Microsoft Remote Desktop
 * Connection to log in to the instance.
 * </p>
 */
public class TemporaryCredential implements Serializable, Cloneable {

    /** The user name. */
    private String username;

    /** The password. Treated as sensitive: never emitted by {@link #toString()}. */
    private String password;

    /**
     * The length of time (in minutes) that the grant is valid. When the grant
     * expires, at the end of this period, the user will no longer be able to
     * use the credentials to log in. If they are logged in at the time, they
     * will be automatically logged out.
     */
    private Integer validForInMinutes;

    /** The instance's AWS OpsWorks ID. */
    private String instanceId;

    /**
     * Sets the user name.
     *
     * @param username The user name.
     */
    public void setUsername(String username) {
        this.username = username;
    }

    /**
     * Returns the user name.
     *
     * @return The user name.
     */
    public String getUsername() {
        return this.username;
    }

    /**
     * Fluent setter for the user name.
     *
     * @param username The user name.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TemporaryCredential withUsername(String username) {
        setUsername(username);
        return this;
    }

    /**
     * Sets the password.
     *
     * @param password The password.
     */
    public void setPassword(String password) {
        this.password = password;
    }

    /**
     * Returns the password.
     *
     * @return The password.
     */
    public String getPassword() {
        return this.password;
    }

    /**
     * Fluent setter for the password.
     *
     * @param password The password.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TemporaryCredential withPassword(String password) {
        setPassword(password);
        return this;
    }

    /**
     * Sets the length of time (in minutes) that the grant is valid.
     *
     * @param validForInMinutes The length of time (in minutes) that the grant
     *        is valid. When the grant expires, at the end of this period, the
     *        user will no longer be able to use the credentials to log in. If
     *        they are logged in at the time, they will be automatically logged
     *        out.
     */
    public void setValidForInMinutes(Integer validForInMinutes) {
        this.validForInMinutes = validForInMinutes;
    }

    /**
     * Returns the length of time (in minutes) that the grant is valid.
     *
     * @return The length of time (in minutes) that the grant is valid. When
     *         the grant expires, at the end of this period, the user will no
     *         longer be able to use the credentials to log in. If they are
     *         logged in at the time, they will be automatically logged out.
     */
    public Integer getValidForInMinutes() {
        return this.validForInMinutes;
    }

    /**
     * Fluent setter for the grant validity period.
     *
     * @param validForInMinutes The length of time (in minutes) that the grant
     *        is valid.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TemporaryCredential withValidForInMinutes(Integer validForInMinutes) {
        setValidForInMinutes(validForInMinutes);
        return this;
    }

    /**
     * Sets the instance's AWS OpsWorks ID.
     *
     * @param instanceId The instance's AWS OpsWorks ID.
     */
    public void setInstanceId(String instanceId) {
        this.instanceId = instanceId;
    }

    /**
     * Returns the instance's AWS OpsWorks ID.
     *
     * @return The instance's AWS OpsWorks ID.
     */
    public String getInstanceId() {
        return this.instanceId;
    }

    /**
     * Fluent setter for the instance's AWS OpsWorks ID.
     *
     * @param instanceId The instance's AWS OpsWorks ID.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public TemporaryCredential withInstanceId(String instanceId) {
        setInstanceId(instanceId);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * <p>
     * SECURITY: the password is deliberately redacted so that this temporary
     * login secret can never leak into logs or debug output.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getUsername() != null)
            sb.append("Username: " + getUsername() + ",");
        if (getPassword() != null)
            sb.append("Password: ***Sensitive Data Redacted***,");
        if (getValidForInMinutes() != null)
            sb.append("ValidForInMinutes: " + getValidForInMinutes() + ",");
        if (getInstanceId() != null)
            sb.append("InstanceId: " + getInstanceId());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Two credentials are equal when all four fields are equal (null-safe).
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof TemporaryCredential))
            return false;
        TemporaryCredential other = (TemporaryCredential) obj;
        return Objects.equals(getUsername(), other.getUsername())
                && Objects.equals(getPassword(), other.getPassword())
                && Objects.equals(getValidForInMinutes(), other.getValidForInMinutes())
                && Objects.equals(getInstanceId(), other.getInstanceId());
    }

    /**
     * Hash code consistent with {@link #equals(Object)}. Objects.hash uses the
     * same 31-based accumulation (null contributes 0) as the previous manual
     * implementation, so hash values are unchanged.
     */
    @Override
    public int hashCode() {
        return Objects.hash(getUsername(), getPassword(), getValidForInMinutes(), getInstanceId());
    }

    /**
     * Shallow copy; all fields are immutable so this is sufficient.
     */
    @Override
    public TemporaryCredential clone() {
        try {
            return (TemporaryCredential) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package db;

import db.util.JsfUtil;
import db.util.PaginationHelper;
import java.io.Serializable;
import java.util.ResourceBundle;
import javax.ejb.EJB;
import javax.inject.Named;
import javax.enterprise.context.SessionScoped;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import javax.faces.model.DataModel;
import javax.faces.model.ListDataModel;
import javax.faces.model.SelectItem;

/**
 * Session-scoped JSF CRUD controller for {@code ReceiveDetail} entities.
 *
 * <p>Follows the standard generated pattern: navigation methods return outcome
 * strings ("List", "View", "Create", "Edit"), a {@link PaginationHelper} pages
 * the list view, and user-facing messages come from the "/Bundle" resource
 * bundle. Persistence goes through the injected {@code ReceiveDetailFacade}.
 */
@Named("receiveDetailController")
@SessionScoped
public class ReceiveDetailController implements Serializable {

    // Entity currently being created/viewed/edited; lazily created by getSelected().
    private ReceiveDetail current;
    // Page-backed data model for the list view; null forces re-creation on next access.
    private DataModel items = null;
    @EJB
    private db.ReceiveDetailFacade ejbFacade;
    private PaginationHelper pagination;
    // Absolute position of the selected row (page offset + row index); -1 when none.
    private int selectedItemIndex;

    public ReceiveDetailController() {
    }

    /** Returns the selected entity, lazily creating a fresh one if none is set. */
    public ReceiveDetail getSelected() {
        if (current == null) {
            current = new ReceiveDetail();
            selectedItemIndex = -1;
        }
        return current;
    }

    private ReceiveDetailFacade getFacade() {
        return ejbFacade;
    }

    /** Lazily builds a 10-items-per-page helper backed by the facade's count/findRange. */
    public PaginationHelper getPagination() {
        if (pagination == null) {
            pagination = new PaginationHelper(10) {

                @Override
                public int getItemsCount() {
                    return getFacade().count();
                }

                @Override
                public DataModel createPageDataModel() {
                    return new ListDataModel(getFacade().findRange(new int[]{getPageFirstItem(), getPageFirstItem() + getPageSize()}));
                }
            };
        }
        return pagination;
    }

    /** Navigates to the list view, discarding any cached page model. */
    public String prepareList() {
        recreateModel();
        return "List";
    }

    /** Captures the clicked row as current and navigates to the detail view. */
    public String prepareView() {
        current = (ReceiveDetail) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return "View";
    }

    /** Starts creation of a brand-new entity. */
    public String prepareCreate() {
        current = new ReceiveDetail();
        selectedItemIndex = -1;
        return "Create";
    }

    /**
     * Persists the current entity. On success shows a bundle message and stays
     * on the create page (ready for another entry); on failure shows the
     * persistence-error message and stays put (null outcome).
     */
    public String create() {
        try {
            getFacade().create(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceiveDetailCreated"));
            return prepareCreate();
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Captures the clicked row as current and navigates to the edit view. */
    public String prepareEdit() {
        current = (ReceiveDetail) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return "Edit";
    }

    /** Saves edits to the current entity; null outcome (stay) on failure. */
    public String update() {
        try {
            getFacade().edit(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceiveDetailUpdated"));
            return "View";
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Deletes the clicked row, then rebuilds pagination and model for the list view. */
    public String destroy() {
        current = (ReceiveDetail) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        performDestroy();
        recreatePagination();
        recreateModel();
        return "List";
    }

    /**
     * Deletes the current entity and tries to stay on the detail view of a
     * neighbouring item; falls back to the list when nothing remains.
     */
    public String destroyAndView() {
        performDestroy();
        recreateModel();
        updateCurrentItem();
        if (selectedItemIndex >= 0) {
            return "View";
        } else {
            // all items were removed - go back to list
            recreateModel();
            return "List";
        }
    }

    /** Removes the current entity via the facade; errors become faces messages. */
    private void performDestroy() {
        try {
            getFacade().remove(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceiveDetailDeleted"));
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
        }
    }

    /** Re-resolves {@code current} after a deletion shifted indices/pages. */
    private void updateCurrentItem() {
        int count = getFacade().count();
        if (selectedItemIndex >= count) {
            // selected index cannot be bigger than number of items:
            selectedItemIndex = count - 1;
            // go to previous page if last page disappeared:
            if (pagination.getPageFirstItem() >= count) {
                pagination.previousPage();
            }
        }
        if (selectedItemIndex >= 0) {
            current = getFacade().findRange(new int[]{selectedItemIndex, selectedItemIndex + 1}).get(0);
        }
    }

    /** Lazily builds the page data model for the current page. */
    public DataModel getItems() {
        if (items == null) {
            items = getPagination().createPageDataModel();
        }
        return items;
    }

    private void recreateModel() {
        items = null;
    }

    private void recreatePagination() {
        pagination = null;
    }

    /** Advances one page and refreshes the model. */
    public String next() {
        getPagination().nextPage();
        recreateModel();
        return "List";
    }

    /** Goes back one page and refreshes the model. */
    public String previous() {
        getPagination().previousPage();
        recreateModel();
        return "List";
    }

    public SelectItem[] getItemsAvailableSelectMany() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), false);
    }

    public SelectItem[] getItemsAvailableSelectOne() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), true);
    }

    /** Looks up an entity by its string primary key. */
    public ReceiveDetail getReceiveDetail(java.lang.String id) {
        return ejbFacade.find(id);
    }

    /**
     * JSF converter mapping between {@code ReceiveDetail} instances and their
     * string key (the receive ID), delegating lookups to the session-scoped
     * controller resolved from the EL context.
     */
    @FacesConverter(forClass = ReceiveDetail.class)
    public static class ReceiveDetailControllerConverter implements Converter {

        @Override
        public Object getAsObject(FacesContext facesContext, UIComponent component, String value) {
            if (value == null || value.length() == 0) {
                return null;
            }
            ReceiveDetailController controller = (ReceiveDetailController) facesContext.getApplication().getELResolver().
                    getValue(facesContext.getELContext(), null, "receiveDetailController");
            return controller.getReceiveDetail(getKey(value));
        }

        // The key is the raw string value (String-typed primary key).
        java.lang.String getKey(String value) {
            java.lang.String key;
            key = value;
            return key;
        }

        String getStringKey(java.lang.String value) {
            StringBuilder sb = new StringBuilder();
            sb.append(value);
            return sb.toString();
        }

        @Override
        public String getAsString(FacesContext facesContext, UIComponent component, Object object) {
            if (object == null) {
                return null;
            }
            if (object instanceof ReceiveDetail) {
                ReceiveDetail o = (ReceiveDetail) object;
                return getStringKey(o.getReceiveID());
            } else {
                throw new IllegalArgumentException("object " + object + " is of type " + object.getClass().getName() + "; expected type: " + ReceiveDetail.class.getName());
            }
        }
    }
}
/*
 * Copyright 2016-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.vault.core;

import java.io.File;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.cert.CertificateFactory;
import java.security.cert.X509CRL;
import java.security.cert.X509Certificate;
import java.time.Duration;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.assertj.core.util.Files;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.util.StreamUtils;
import org.springframework.vault.VaultException;
import org.springframework.vault.core.VaultPkiOperations.Encoding;
import org.springframework.vault.support.Certificate;
import org.springframework.vault.support.CertificateBundle;
import org.springframework.vault.support.VaultCertificateRequest;
import org.springframework.vault.support.VaultCertificateResponse;
import org.springframework.vault.support.VaultSignCertificateRequestResponse;
import org.springframework.vault.util.IntegrationTestSupport;
import org.springframework.vault.util.RequiresVaultVersion;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
import static org.springframework.vault.util.Settings.findWorkDir;

/**
 * Integration tests for {@link VaultPkiTemplate} through {@link VaultPkiOperations}.
 *
 * <p>Requires a running Vault server (provided by {@link IntegrationTestSupport});
 * {@link #before()} mounts the {@code pki} backend, installs an intermediate CA
 * from the test work directory, and configures a {@code testrole} role.
 *
 * @author Mark Paluch
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = VaultIntegrationTestConfiguration.class)
class VaultPkiTemplateIntegrationTests extends IntegrationTestSupport {

	// Vault versions from 0.7.3 on accept TTLs without an explicit unit suffix.
	static final String NO_TTL_UNIT_REQUIRED_FROM = "0.7.3";

	@Autowired
	VaultOperations vaultOperations;

	VaultPkiOperations pkiOperations;

	/**
	 * Mounts the pki backend (if absent), installs the intermediate CA bundle
	 * (cert + decrypted key concatenated as pem_bundle), and writes the
	 * {@code testrole} role used by all tests.
	 */
	@BeforeEach
	void before() {

		pkiOperations = vaultOperations.opsForPki();

		if (!prepare().hasSecret("pki")) {
			prepare().mountSecret("pki");
		}

		File workDir = findWorkDir(new File(System.getProperty("user.dir")));

		String cert = Files.contentOf(new File(workDir, "ca/certs/intermediate.cert.pem"), "US-ASCII");
		String key = Files.contentOf(
				new File(workDir, "ca/private/intermediate.decrypted.key.pem"), "US-ASCII");

		Map<String, String> pembundle = Collections.singletonMap("pem_bundle", cert + key);

		vaultOperations.write("pki/config/ca", pembundle);

		Map<String, String> role = new HashMap<String, String>();
		role.put("allowed_domains", "localhost,example.com");
		role.put("allow_subdomains", "true");
		role.put("allow_localhost", "true");
		role.put("allow_ip_sans", "true");
		role.put("max_ttl", "72h");

		vaultOperations.write("pki/roles/testrole", role);
	}

	@Test
	void issueCertificateShouldCreateCertificate() {

		VaultCertificateRequest request = VaultCertificateRequest
				.create("hello.example.com");

		VaultCertificateResponse certificateResponse = pkiOperations
				.issueCertificate("testrole", request);

		CertificateBundle data = certificateResponse.getRequiredData();

		// A full bundle: key, leaf certificate, issuing CA and serial must all be present.
		assertThat(data.getPrivateKey()).isNotEmpty();
		assertThat(data.getCertificate()).isNotEmpty();
		assertThat(data.getIssuingCaCertificate()).isNotEmpty();
		assertThat(data.getSerialNumber()).isNotEmpty();
		assertThat(data.getX509Certificate().getSubjectX500Principal().getName())
				.isEqualTo("CN=hello.example.com");
	}

	@Test
	@RequiresVaultVersion(NO_TTL_UNIT_REQUIRED_FROM)
	void issueCertificateWithTtlShouldCreateCertificate() {

		VaultCertificateRequest request = VaultCertificateRequest.builder()
				.ttl(Duration.ofHours(48)).commonName("hello.example.com").build();

		VaultCertificateResponse certificateResponse = pkiOperations
				.issueCertificate("testrole", request);

		X509Certificate certificate = certificateResponse.getRequiredData()
				.getX509Certificate();

		// 48h TTL requested: accept a 40h..50h expiry window to absorb clock drift.
		Instant now = Instant.now();
		assertThat(certificate.getNotAfter())
				.isAfter(Date.from(now.plus(40, ChronoUnit.HOURS)))
				.isBefore(Date.from(now.plus(50, ChronoUnit.HOURS)));
	}

	@Test
	void signShouldSignCsr() {

		// Static PKCS#10 request with subject CN=csr.example.com; Vault signs it
		// under testrole and the signed certificate must carry the CSR's CN.
		String csr = "-----BEGIN CERTIFICATE REQUEST-----\n"
				+ "MIICzTCCAbUCAQAwgYcxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpTb21lLVN0YXRl\n"
				+ "MRUwEwYDVQQHEwxTYW4gVmF1bHRpbm8xFTATBgNVBAoTDFNwcmluZyBWYXVsdDEY\n"
				+ "MBYGA1UEAxMPY3NyLmV4YW1wbGUuY29tMRswGQYJKoZIhvcNAQkBFgxzcHJpbmdA\n"
				+ "dmF1bHQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVlDBT1gAONIp4\n"
				+ "GQQ7BWDeqNzlscWqu5oQyfvw6oNFZzYWGVTgX/n72biv8d1Wx30MWpVYhbL0mk9m\n"
				+ "Uu15elMZHPb4F4bk8VDSiB9527SwAd/QpkNC1RsPp2h6g2LvGPJ2eidHSlLtF2To\n"
				+ "A4i6z0K0++nvYKSf9Af0sod2Z51xc9uPj/oN5z/8BQuGoCBpxJqgl7N/csMICixY\n"
				+ "2fQcCUbdPPqE9INIInUHe3mPE/yvxko9aYGZ5jnrdZyiQaRRKBdWpvbRLKXQ78Fz\n"
				+ "vXR3G33yn9JAN6wl1A916DiXzy2xHT19vyAn1hBUj2M6KFXChQ30oxTyTOqHCMLP\n"
				+ "m/BSEOsPAgMBAAGgADANBgkqhkiG9w0BAQsFAAOCAQEAYFssueiUh3YGxnXcQ4dp\n"
				+ "ZqVWeVyOuGGaFJ4BA0drwJ9Mt/iNmPUTGE2oBNnh2R7e7HwGcNysFHZZOZBEQ0Hh\n"
				+ "Vn93GO7cfaTOetK0VtDqis1VFQD0eVPWf5s6UqT/+XGrFRhwJ9hM+2FQSrUDFecs\n"
				+ "+/605n1rD7qOj3vkGrtwvEUrxyRaQaKpPLHmVHENqV6F1NsO3Z27f2FWWAZF2VKN\n"
				+ "cCQQJNc//DbIN3J3JSElpIDBDHctoBoQVnMiwpCbSA+CaAtlWYJKnAfhTKeqnNMy\n"
				+ "qf3ACZ+1sBIuqSP7dEJ2KfIezaCPQ88+PAloRB52LFa+iq3yI7F5VzkwAvQFnTi+\n"
				+ "cQ==\n" + "-----END CERTIFICATE REQUEST-----";

		VaultCertificateRequest request = VaultCertificateRequest
				.create("hello.example.com");

		VaultSignCertificateRequestResponse certificateResponse = pkiOperations
				.signCertificateRequest("testrole", csr, request);

		Certificate data = certificateResponse.getRequiredData();

		assertThat(data.getCertificate()).isNotEmpty();
		assertThat(data.getIssuingCaCertificate()).isNotEmpty();
		assertThat(data.getSerialNumber()).isNotEmpty();
		// Subject comes from the CSR, not from the request's common name.
		assertThat(data.getX509Certificate().getSubjectX500Principal().getName())
				.isEqualTo("CN=csr.example.com");
		assertThat(data.createTrustStore()).isNotNull();
	}

	@Test
	void issueCertificateFail() {

		// "not.supported" is outside the role's allowed_domains, so issuance must fail.
		VaultCertificateRequest request = VaultCertificateRequest.create("not.supported");

		assertThatExceptionOfType(VaultException.class)
				.isThrownBy(() -> pkiOperations.issueCertificate("testrole", request));
	}

	@Test
	void shouldRevokeCertificate() throws Exception {

		VaultCertificateRequest request = VaultCertificateRequest
				.create("foo.example.com");

		VaultCertificateResponse certificateResponse = pkiOperations
				.issueCertificate("testrole", request);

		// Vault reports the serial as colon-separated hex; strip colons to parse it.
		BigInteger serial = new BigInteger(certificateResponse.getRequiredData()
				.getSerialNumber().replaceAll("\\:", ""), 16);

		pkiOperations.revoke(certificateResponse.getRequiredData().getSerialNumber());

		// The revoked serial must show up on the DER-encoded CRL.
		try (InputStream in = pkiOperations.getCrl(Encoding.DER)) {

			CertificateFactory cf = CertificateFactory.getInstance("X.509");
			X509CRL crl = (X509CRL) cf.generateCRL(in);

			assertThat(crl.getRevokedCertificate(serial)).isNotNull();
		}
	}

	@Test
	void shouldReturnCrl() throws Exception {

		// DER encoding parses as an X509CRL; PEM encoding is returned as non-empty bytes.
		try (InputStream in = pkiOperations.getCrl(Encoding.DER)) {

			CertificateFactory cf = CertificateFactory.getInstance("X.509");
			assertThat(cf.generateCRL(in)).isInstanceOf(X509CRL.class);
		}

		try (InputStream crl = pkiOperations.getCrl(Encoding.PEM)) {

			byte[] bytes = StreamUtils.copyToByteArray(crl);
			assertThat(bytes).isNotEmpty();
		}
	}
}
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.apps.slideshow;

import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.Toolkit;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.ArrayList;

import javax.swing.JComponent;
import javax.swing.JWindow;

import org.apache.batik.bridge.BridgeContext;
import org.apache.batik.bridge.DocumentLoader;
import org.apache.batik.bridge.GVTBuilder;
import org.apache.batik.bridge.UserAgent;
import org.apache.batik.bridge.UserAgentAdapter;
import org.apache.batik.bridge.ViewBox;
import org.apache.batik.gvt.GraphicsNode;
import org.apache.batik.gvt.renderer.StaticRenderer;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.svg.SVGDocument;

/**
 * Full-screen SVG slideshow: a RenderThread renders each SVG file off-screen
 * while a TransitionThread reveals the previous render block-by-block on the
 * visible back buffer. A mouse click toggles pause, or exits once all slides
 * have been shown.
 *
 * @version $Id$
 */
public class Main extends JComponent {

    // Rendering pipeline: renderer draws the GVT tree built via the bridge context.
    StaticRenderer renderer;
    UserAgent      userAgent;
    DocumentLoader loader;
    BridgeContext  ctx;

    // Most recently completed off-screen render (source of the next transition).
    BufferedImage image;
    // Visible back buffer: paint() blits it; TransitionThread mutates it block-by-block.
    BufferedImage display;
    File [] files;

    // Transition length (ms); frameDelay is the minimum time between transition starts,
    // i.e. transition time plus hold time.
    static int duration = 3000;
    static int frameDelay = duration+7000;

    // Set after the last slide's transition finishes; a click then exits the app.
    volatile boolean done = false;

    public Main(File []files, Dimension size) {
        setBackground(Color.black);
        this.files = files;
        UserAgentAdapter ua = new UserAgentAdapter();
        renderer  = new StaticRenderer();
        userAgent = ua;
        loader    = new DocumentLoader(userAgent);
        ctx       = new BridgeContext(userAgent, loader);
        ua.setBridgeContext(ctx);

        // Default to the full screen when no explicit window size was given.
        if (size == null) {
            size = Toolkit.getDefaultToolkit().getScreenSize();
        }

        setPreferredSize(size);
        setDoubleBuffered(false);
        addMouseListener(new MouseAdapter() {
                public void mouseClicked(MouseEvent me) {
                    // Click: exit when finished, otherwise pause/resume.
                    if (done) System.exit(0);
                    else togglePause();
                }
            });

        // Slightly oversize the buffer; the window is placed at (-1,-1) below,
        // so the extra 2px hides the border rows/columns.
        size.width  += 2;
        size.height += 2;
        display = new BufferedImage(size.width, size.height,
                                    BufferedImage.TYPE_INT_BGR);

        Thread t = new RenderThread();
        t.start();

        JWindow w = new JWindow();
        w.setBackground(Color.black);
        w.getContentPane().setBackground(Color.black);
        w.getContentPane().add(this);
        w.pack();
        w.setLocation(new Point(-1, -1));
        w.setVisible(true);
    }

    /**
     * Daemon thread that loads, builds and renders each SVG file in turn,
     * handing every finished frame to setTransition() (which blocks while a
     * previous transition is still running).
     */
    class RenderThread extends Thread {

        RenderThread(){
            super("RenderThread");
            setDaemon( true );
        }

        public void run() {
            renderer.setDoubleBuffered(true);
            for (int i=0; i<files.length; i++) {
                GraphicsNode gvtRoot = null;
                GVTBuilder builder = new GVTBuilder();

                try {
                    String fileName = files[ i ].toURL().toString();
                    System.out.println("Reading: " + fileName );
                    Document svgDoc = loader.loadDocument( fileName );
                    System.out.println("Building: " + fileName );
                    gvtRoot = builder.build(ctx, svgDoc);
                    System.out.println("Rendering: " + fileName );
                    renderer.setTree(gvtRoot);
                    // Scale the SVG's viewBox to fill the display buffer.
                    Element elt = ((SVGDocument)svgDoc).getRootElement();
                    renderer.setTransform
                        (ViewBox.getViewTransform
                         (null, elt, display.getWidth(), display.getHeight(), ctx));
                    renderer.updateOffScreen(display.getWidth(),
                                             display.getHeight());
                    Rectangle r = new Rectangle(0, 0,
                                                display.getWidth(),
                                                display.getHeight());
                    renderer.repaint(r);
                    System.out.println("Painting: " + fileName );
                    image = renderer.getOffScreen();
                    // Blocks until any in-flight transition completes.
                    setTransition(image);
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
            // Wait for the final transition, then arm the click-to-exit behaviour.
            if (transitionThread != null) {
                try {
                    transitionThread.join();
                } catch (InterruptedException ie) { }
                done = true;
                setCursor(new Cursor(Cursor.WAIT_CURSOR));
            }
        }
    }

    // Currently running transition, or null; guarded by 'this' (see setTransition).
    volatile Thread transitionThread = null;

    /**
     * Starts a transition to newImg, first waiting (on 'this') until the
     * previous TransitionThread has cleared itself and signalled notifyAll().
     */
    public void setTransition(BufferedImage newImg) {
        synchronized (this) {
            while (transitionThread != null) {
                try {
                    wait();
                } catch (InterruptedException ie) { }
            }

            transitionThread = new TransitionThread(newImg);
            transitionThread.start();
        }
    }

    // Start time of the last transition; used to enforce frameDelay between slides.
    long startLastTransition=0;

    // Pause flag toggled by mouse clicks; TransitionThread waits while paused.
    volatile boolean paused = false;

    /**
     * Toggles pause. The cursor shows the state (WAIT = paused); on resume the
     * transition thread is woken via notifyAll() on its own monitor.
     */
    public void togglePause() {
        synchronized(this) {
            paused = !paused;
            Cursor c;
            if (paused) {
                c = new Cursor(Cursor.WAIT_CURSOR);
            } else {
                c = new Cursor(Cursor.DEFAULT_CURSOR);
                if (transitionThread != null) {
                    synchronized (transitionThread) {
                        transitionThread.notifyAll();
                    }
                }
            }

            setCursor(c);
        }
    }

    /**
     * Daemon thread that reveals a new frame by copying random 75x75 blocks
     * from the source image onto the display buffer, paced so the whole
     * transition takes roughly 'duration' ms.
     */
    class TransitionThread extends Thread {
        BufferedImage src;
        int blockw = 75;
        int blockh = 75;

        public TransitionThread(BufferedImage bi) {
            super( "TransitionThread");
            setDaemon( true );
            src = bi;
        }

        public void run() {
            // Partition the display into a grid of blocks (last row/col may be partial).
            int xblocks = (display.getWidth()+blockw-1)/blockw;
            int yblocks = (display.getHeight()+blockh-1)/blockh;
            int nblocks = xblocks*yblocks;
            // Time budget per block so the full transition spans 'duration' ms.
            int tblock = duration/nblocks;

            Point [] rects = new Point[nblocks];
            for (int y=0; y<yblocks; y++)
                for (int x=0; x<xblocks; x++)
                    rects[y*xblocks+x] = new Point(x, y);

            Graphics2D g2d = display.createGraphics();
            g2d.setColor( Color.black );

            // Hold the previous slide until frameDelay ms have elapsed since the
            // last transition started (GC opportunistically while waiting).
            long currTrans = System.currentTimeMillis();
            while ((currTrans-startLastTransition) < frameDelay) {
                try {
                    long stime = frameDelay-(currTrans-startLastTransition);
                    if (stime > 500) {
                        System.gc();
                        currTrans = System.currentTimeMillis();
                        stime = frameDelay-(currTrans-startLastTransition);
                    }
                    if (stime > 0)
                        sleep(stime);
                } catch (InterruptedException ie) { }
                currTrans = System.currentTimeMillis();
            }

            // Block here while paused; togglePause() wakes us on resume.
            synchronized(this) {
                while (paused) {
                    try {
                        wait();
                    } catch (InterruptedException ie) { }
                }
            }

            long last = startLastTransition = System.currentTimeMillis();
            for (int i=0; i<rects.length; i++) {
                // Pick a random block from the rects[0 .. length-i-1] "unpicked" prefix,
                // then shift the tail left one slot to delete it (note idx+1-1 == idx).
                int idx = (int)(Math.random()*(rects.length-i));
                Point pt = rects[idx];
                System.arraycopy( rects, idx + 1, rects, idx + 1 - 1,
                                  rects.length - i - idx -1 );

                int x=pt.x*blockw, y=pt.y*blockh;
                int w=blockw, h = blockh;
                if (x+w > src.getWidth())  w = src.getWidth()-x;
                if (y+h > src.getHeight()) h = src.getHeight()-y;

                // Clear then copy the block; lock display against concurrent painting.
                synchronized (display) {
                    g2d.fillRect(x, y, w, h);
                    BufferedImage sub;
                    sub = src.getSubimage(x, y, w, h);
                    g2d.drawImage(sub, null, x, y);
                }
                repaint(x, y, w, h);

                // Pace the reveal so each block takes ~tblock ms.
                long current = System.currentTimeMillis();
                try {
                    long dt = current-last;
                    if (dt < tblock)
                        sleep(tblock-dt);
                } catch (InterruptedException ie) { }
                last = current;
            }

            // Signal setTransition() that the slot is free again.
            synchronized (Main.this) {
                transitionThread = null;
                Main.this.notifyAll();
            }
        }
    }

    public void paint(Graphics g) {
        Graphics2D g2d = (Graphics2D)g;
        if (display == null) return;
        g2d.drawImage(display, null, 0, 0);
    }

    /**
     * Reads a file-list file (one image path per line; '#' starts a comment)
     * and appends each entry, resolved relative to the list file, to fileVec.
     */
    public static void readFileList(String file, List fileVec) {
        BufferedReader br;
        try {
            br = new BufferedReader(new FileReader(file));
        } catch(FileNotFoundException fnfe) {
            System.err.println("Unable to open file-list: " + file);
            return;
        }
        try {
            URL flURL = new File(file).toURL();

            String line;
            while ((line = br.readLine()) != null) {
                String str = line;
                // Strip trailing comments and surrounding whitespace.
                int idx = str.indexOf('#');
                if (idx != -1)
                    str = str.substring(0, idx);
                str = str.trim();
                if (str.length() == 0)
                    continue;

                try {
                    URL imgURL = new URL(flURL, str);
                    fileVec.add(imgURL.getFile());
                } catch (MalformedURLException mue) {
                    System.err.println("Can't make sense of line:\n " + line);
                }
            }
        } catch (IOException ioe) {
            System.err.println("Error while reading file-list: " + file);
        } finally {
            try {
                br.close();
            } catch (IOException ioe) { }
        }
    }

    /**
     * Entry point. Parses -fl/--file-list, -ft/--frame-time, -tt/--transition-time,
     * -ws/--window-size, "--" (rest are files) and bare file arguments, then
     * starts the slideshow.
     */
    public static void main(String []args) {
        List fileVec = new ArrayList();
        Dimension d = null;

        if (args.length == 0) {
            showUsage();
            return;
        }

        for (int i=0; i<args.length; i++) {
            if ((args[i].equals("-h")) ||
                (args[i].equals("-help")) ||
                (args[i].equals("--help"))){
                showUsage();
                return;
            } else if (args[i].equals("--")) {
                // Everything after "--" is a file name, flags included.
                i++;
                while(i < args.length) {
                    fileVec.add(args[i++]);
                }
                break;
            } else if ((args[i].equals("-fl"))||
                       (args[i].equals("--file-list"))) {
                if (i+1 == args.length) {
                    System.err.println
                        ("Must provide name of file list file after " + args[i]);
                    break;
                }
                readFileList(args[i+1], fileVec);
                i++;
            } else if ((args[i].equals("-ft"))||
                       (args[i].equals("--frame-time"))) {
                if (i+1 == args.length) {
                    System.err.println
                        ("Must provide time in millis after " + args[i]);
                    break;
                }
                try {
                    frameDelay = Integer.decode(args[i+1]).intValue();
                    i++;
                } catch (NumberFormatException nfe) {
                    System.err.println
                        ("Can't parse frame time: " + args[i+1]);
                }
            } else if ((args[i].equals("-tt"))||
                       (args[i].equals("--transition-time"))) {
                if (i+1 == args.length) {
                    System.err.println
                        ("Must provide time in millis after " + args[i]);
                    break;
                }
                try {
                    duration = Integer.decode(args[i+1]).intValue();
                    i++;
                } catch (NumberFormatException nfe) {
                    System.err.println
                        ("Can't parse transition time: " + args[i+1]);
                }
            } else if ((args[i].equals("-ws"))||
                       (args[i].equals("--window-size"))) {
                if (i+1 == args.length) {
                    System.err.println
                        ("Must provide window size [w,h] after " + args[i]);
                    break;
                }
                try {
                    // Accept "<size>" (square) or "<w>,<h>".
                    int idx = args[i+1].indexOf(',');
                    int w, h;
                    if (idx == -1)
                        w = h = Integer.decode(args[i+1]).intValue();
                    else {
                        String wStr = args[i+1].substring(0,idx);
                        String hStr = args[i+1].substring(idx+1);
                        w = Integer.decode(wStr).intValue();
                        h = Integer.decode(hStr).intValue();
                    }
                    d = new Dimension(w, h);
                    i++;
                } catch (NumberFormatException nfe) {
                    System.err.println
                        ("Can't parse window size: " + args[i+1]);
                }
            } else
                fileVec.add(args[i]);
        }

        File [] files = new File[fileVec.size()];

        for (int i=0; i<fileVec.size(); i++) {
            try {
                files[i] = new File((String)fileVec.get(i));
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }

        new Main(files, d);
    }

    public static void showUsage() {
        System.out.println
            ("Options:\n" +
             " -- : Remaining args are file names\n" +
             " -fl <file>\n" +
             " --file-list <file> : file contains list of images to\n" +
             " show one per line\n" +
             " -ws <width>[,<height>]\n" +
             " -window-size <width>[,<height>] : Set the size of slideshow window\n" +
             " defaults to full screen\n" +
             " -ft <int>\n" +
             " --frame-time <int> : Amount of time in millisecs to\n" +
             " show each frame.\n" +
             " Includes transition time.\n" +
             " -tt <int>\n" +
             " --transition-time <int> : Amount of time in millisecs to\n" +
             " transition between frames.\n" +
             " <file> : SVG file to display");
    }
}
package com.chrysler.mhacks6demo; import com.openxc.units.Celsius; import com.openxc.units.Coordinate; import com.openxc.units.Degree; import com.openxc.units.Kilometer; import com.openxc.units.KilometersPerHour; import com.openxc.units.KilopascalGauge; import com.openxc.units.Liter; import com.openxc.units.MetersPerSecondSquared; import com.openxc.units.NewtonMeter; import com.openxc.units.PSI; import com.openxc.units.Percentage; import com.openxc.units.RotationsPerMinute; import com.openxc.units.Volt; import com.openxc.wrappers.BrakePedalPosition; import com.openxc.wrappers.GearPosition; import com.openxc.wrappers.IgnitionPosition; import com.openxc.wrappers.PaddleShifterPosition; import com.openxc.wrappers.SwitchState; import com.openxc.wrappers.TireStatus; import com.openxc.wrappers.TurnSignalPosition; /** * Car data packet containing all vehicle information from the CAN bus * @author Ray, Michael (FCA) * @version 1 * @since 06/11/2015 */ public class CarDataPacket { private Engine engine; private Tire tire; private Infotainment infotainment; private Transmission transmission; private Cluster cluster; private Diagnostic diagnostic; private Lighting lighting; private UserControl userControl; public CarDataPacket() { tire = new Tire(); engine = new Engine(); infotainment = new Infotainment(); transmission = new Transmission(); cluster = new Cluster(); diagnostic = new Diagnostic(); lighting = new Lighting(); userControl = new UserControl(); } /** * Get infotainment information * @return infotainment object */ public Infotainment getInfotainment() { return this.infotainment; } /** * Set infotainment information * @param infotainment infotainment object */ public void setInfotainment(Infotainment infotainment) { this.infotainment = infotainment; } /** * Get engine information * @return engine object */ public Engine getEngine() { return this.engine; } /** * Set engine information * @param engine engine object */ public void setEngine(Engine engine) { this.engine = 
engine; } /** * Get transmission information * @return transmission object */ public Transmission getTransmission() { return this.transmission; } /** * Set transmission information * @param transmission transmission object */ public void setTransmission(Transmission transmission) { this.transmission = transmission; } /** * Get tire information * @return tire object */ public Tire getTire() { return this.tire; } /** * Set tire information * @param tire tire object */ public void setTire(Tire tire) { this.tire = tire; } /** * Get cluster information * @return cluster object */ public Cluster getCluster() { return this.cluster; } /** * Set cluster information * @param cluster cluster object */ public void setCluster(Cluster cluster) { this.cluster = cluster; } /** * Get diagnostic information * @return diagnostic object */ public Diagnostic getDiagnostic() { return this.diagnostic; } /** * Set diagnostic information * @param diagnostic diagnostic object */ public void setDiagnostic(Diagnostic diagnostic) { this.diagnostic = diagnostic; } /** * Get lighting information * @return lighting object */ public Lighting getLighting() { return this.lighting; } /** * Set lighting information * @param lighting lighting object */ public void setLighting(Lighting lighting) { this.lighting = lighting; } /** * Get user control information * @return user control object */ public UserControl getUserControl() { return this.userControl; } /** * Set user control information * @param userControl user control object */ public void setUserControl(UserControl userControl) { this.userControl = userControl; } /** * <b>Engine class</b><br> * Contains engine coolant temperature, engine oil temperature, and engine speed * * @see Celsius * @see RotationsPerMinute */ public class Engine { private Celsius engineCoolTemp, engineOilTemp; private RotationsPerMinute engineSpeed; /** * Initialize sensor data to defaults */ public Engine() { engineCoolTemp = new Celsius(0); engineOilTemp = new Celsius(0); 
engineSpeed = new RotationsPerMinute(0); } /** * Get engine coolant temperature * @return engine coolant temp in degrees celsius */ public Celsius getEngineCoolTemp() { return engineCoolTemp; } /** * Set engine coolant temperature * @param engineCoolTemp engine coolant temp in degrees celsius */ public void setEngineCoolTemp(Celsius engineCoolTemp) { this.engineCoolTemp = engineCoolTemp; } /** * Get engine oil temperature * @return engine oil temp in degrees celsius */ public Celsius getEngineOilTemp() { return engineOilTemp; } /** * Set engine oil temperature * @param engineOilTemp engine oil temp in degrees celsius */ public void setEngineOilTemp(Celsius engineOilTemp) { this.engineOilTemp = engineOilTemp; } /** * Get engine speed * @return engine speed in rotations per min */ public RotationsPerMinute getEngineSpeed() { return engineSpeed; } /** * Set engine speed * @param engineSpeed engine speed in rotations per min */ public void setEngineSpeed(RotationsPerMinute engineSpeed) { this.engineSpeed = engineSpeed; } /** * Converts the engine object to a string<br> * <pre> Engine coolant temperature: 0 C<br> Engine oil temperature: 0 C<br> Engine speed: 0 RPM</pre> * @return Engine string */ @Override public String toString() { return "Engine coolant temperature: " + engineCoolTemp.toString() + "\nEngine oil temperature: " + engineCoolTemp.toString() + "\nEngine speed: " + engineSpeed.toString(); } } /** * <b>Tire class</b><br> * Contains tire pressures and tire sensor statuses * * @see PSI * @see TireStatus */ public class Tire { private PSI[] tirePressure; private TireStatus[] tireStatus; public static final int FRONTLEFT = 0, FRONTRIGHT = 1, REARLEFT = 2, REARRIGHT = 3; /** * Initialize sensor data to defaults */ public Tire() { tirePressure = new PSI[4]; for (int i=0; i<4; i++) tirePressure[i] = new PSI(0); tireStatus = new TireStatus[4]; for (int i=0; i<4; i++) tireStatus[i] = TireStatus.SNA; } /** * Get array of tire pressures * @return array of PSIs */ 
public PSI[] getTirePressure() { return tirePressure; } /** * Set array of tire pressures * @param tirePressure array of tire pressures in psi */ public void setTirePressure(PSI[] tirePressure) { this.tirePressure = tirePressure; } /** * Set a tire pressure * @param tirePressure tire pressure in psi * @param index desired tire */ public void setTirePressure(PSI tirePressure, int index) { this.tirePressure[index] = tirePressure; } /** * Get array of tire pressure sensor statuses * @return array of tire sensor statuses */ public TireStatus[] getTireStatus() { return tireStatus; } /** * Set array of tire pressure sensor statuses * @param tireStatus array of tire sensor statuses */ public void setTireStatus(TireStatus[] tireStatus) { this.tireStatus = tireStatus; } /** * Set a tire pressure sensor status * @param tireStatus tire sensor status * @param index desired tire */ public void setTireStatus(TireStatus tireStatus, int index) { this.tireStatus[index] = tireStatus; } /** * Converts the tire object to a string<br> * <pre> Tire pressures: FrontLeft, FrontRight, RearLeft, RearRight<br> Tire sensors: FrontLeft, FrontRight, RearLeft, RearRight</pre> * @return Tire string */ @Override public String toString() { return "Tire pressures: " + tirePressure[0].toString() + ", " + tirePressure[1].toString() + ", " + tirePressure[2].toString() + ", " + tirePressure[3].toString() + "\nTire sensors: " + tireStatus[0].toString() + ", " + tireStatus[1].toString() + ", " + tireStatus[2].toString() + ", " + tireStatus[3].toString(); } } /** * <b>Infotainment class</b><br> * Contains screen coordinates * * @see Coordinate * @see SwitchState */ public class Infotainment { private Coordinate screenX, screenY; private SwitchState vol, seek; /** * Initialize sensor data to defaults */ public Infotainment() { screenX = new Coordinate(0); screenY = new Coordinate(0); vol = SwitchState.SNA; seek = SwitchState.SNA; } /** * Get screen X touch coordinate * @return x coordinate in pixels */ 
public Coordinate getScreenX() { return screenX; } /** * Set screen X touch coordinate * @param screenX x coordinate in pixels */ public void setScreenX(Coordinate screenX) { this.screenX = screenX; } /** * Get screen Y touch coordinate * @return y coordinate in pixels */ public Coordinate getScreenY() { return screenY; } /** * Set screen Y touch coordinate * @param screenY y coordinate in pixels */ public void setScreenY(Coordinate screenY) { this.screenY = screenY; } /** * Get volume switch state * @return volume switch state */ public SwitchState getVol() { return vol; } /** * Set volume switch state * @param vol volume switch state */ public void setVol(SwitchState vol) { this.vol = vol; } /** * Get seek (radio tuner) switch state * @return seek (radio tuner) switch state */ public SwitchState getSeek() { return seek; } /** * Set seek (radio tuner) switch state * @param seek seek (radio tuner) switch state */ public void setSeek(SwitchState seek) { this.seek = seek; } /** * Converts the infotainment object to a string<br> * <pre> X touch coordinate: 0 pixels<br> Y touch coordinate: 0 pixels * Volume switch state: SNA<br> Seek switch state: SNA</pre> * @return Infotainment string */ @Override public String toString() { return "X touch coordinate: " + screenX.toString() + "\nY touch coordinate: " + screenY.toString() + "\nVolume switch state: " + vol.toString() + "\nSeek switch state: " + seek.toString(); } } /** * <b>Transmission class</b><br> * Contains transmission torque and transmission gear position * * @see NewtonMeter * @see GearPosition */ public class Transmission { private NewtonMeter torqueAtTransmission; private GearPosition transmissionGearPosition; /** * Initialize sensor data to defaults */ public Transmission() { torqueAtTransmission = new NewtonMeter(0); transmissionGearPosition = GearPosition.NEUTRAL; } /** * Get torque at the transmission * @return transmission torque in newton meters */ public NewtonMeter getTorqueAtTransmission() { return 
torqueAtTransmission; } /** * Set torque at the transmission shaft * @param torqueAtTransmission transmission torque in newton meters */ public void setTorqueAtTransmission(NewtonMeter torqueAtTransmission) { this.torqueAtTransmission = torqueAtTransmission; } /** * Get gear position of the transmission * @return transmission gear */ public GearPosition getTransmissionGearPosition() { return transmissionGearPosition; } /** * Set gear position of the transmission * @param transmissionGearPosition transmission gear */ public void setTransmissionGearPosition(GearPosition transmissionGearPosition) { this.transmissionGearPosition = transmissionGearPosition; } /** * Converts the transmission object to a string<br> * <pre> Torque at transmission: 0 Nm<br> Transmission gear position: NEUTRAL</pre> * @return Transmission string */ @Override public String toString() { return "Torque at transmission: " + torqueAtTransmission.toString() + "\nTransmission gear position: " + transmissionGearPosition.toString(); } } /** * <b>Cluster class</b><br> * Contains ambient temperature, fuel consumed, fuel level, odometer, oil pressure, vehicle speed and turn signal status * * @see Celsius * @see Liter * @see Kilometer * @see KilopascalGauge * @see KilometersPerHour * @see TurnSignalPosition */ public class Cluster { private Celsius ambientTemp, outsideAirTemp; private Liter fuelConsumed, fuelLevel; private Kilometer odometer; private KilopascalGauge oilPressure; private KilometersPerHour vehicleSpeed; private TurnSignalPosition turnSignalPosition; private boolean brakeFluidLow; /** * Initialize sensor data to defaults */ public Cluster() { ambientTemp = new Celsius(0); outsideAirTemp = new Celsius(0); fuelConsumed = new Liter(0); fuelLevel = new Liter(0); odometer = new Kilometer(0); oilPressure = new KilopascalGauge(0); vehicleSpeed = new KilometersPerHour(0); turnSignalPosition = TurnSignalPosition.OFF; brakeFluidLow = false; } /** * Get ambient temperature * @return ambient 
temperature in degrees celsius */ public Celsius getAmbientTemp() { return ambientTemp; } /** * Set ambient temperature * @param ambientTemp ambient temperature in degrees celsius */ public void setAmbientTemp(Celsius ambientTemp) { this.ambientTemp = ambientTemp; } /** * Get fuel consumed during last cycle * @return fuel consumed during last cycle in liters */ public Liter getFuelConsumed() { return fuelConsumed; } /** * Set fuel consumed during last cycle * @param fuelConsumed fuel consumed during last cycle in liters */ public void setFuelConsumed(Liter fuelConsumed) { this.fuelConsumed = fuelConsumed; } /** * Get fuel level * @return fuel level in liters */ public Liter getFuelLevel() { return fuelLevel; } /** * Set fuel level * @param fuelLevel fuel level in liters */ public void setFuelLevel(Liter fuelLevel) { this.fuelLevel = fuelLevel; } /** * Get odometer * @return odometer in kilometers */ public Kilometer getOdometer() { return odometer; } /** * Set odometer * @param odometer odometer in kilometers */ public void setOdometer(Kilometer odometer) { this.odometer = odometer; } /** * Get oil pressure * @return oil pressure in kilopascal gauge */ public KilopascalGauge getOilPressure() { return oilPressure; } /** * Set oil pressure * @param oilPressure oil pressure in kilopascal gauge */ public void setOilPressure(KilopascalGauge oilPressure) { this.oilPressure = oilPressure; } /** * Get vehicle speed * @return vehicle speed in kilometers per hour */ public KilometersPerHour getVehicleSpeed() { return vehicleSpeed; } /** * Set vehicle speed * @param vehicleSpeed vehicle speed in kilometers per hour */ public void setVehicleSpeed(KilometersPerHour vehicleSpeed) { this.vehicleSpeed = vehicleSpeed; } /** * Get turn signal position * @return turn signal position */ public TurnSignalPosition getTurnSignalPosition() { return turnSignalPosition; } /** * Set turn signal position * @param turnSignalPosition turn signal position */ public void 
setTurnSignalPosition(TurnSignalPosition turnSignalPosition) { this.turnSignalPosition = turnSignalPosition; } /** * Get outside air temperature * @return outside air temperature in degree celsius */ public Celsius getOutsideAirTemp() { return outsideAirTemp; } /** * Set outside air temperature * @param outsideAirTemp outside air temperature in degree celsius */ public void setOutsideAirTemp(Celsius outsideAirTemp) { this.outsideAirTemp = outsideAirTemp; } /** * Get brake fluid low status * @return brake fluid low status as a boolean */ public boolean getBrakeFluidLow() { return brakeFluidLow; } /** * Set brake fluid low status * @param brakeFluidLow brake fluid low status as a boolean */ public void setBrakeFluidLow(com.openxc.units.Boolean brakeFluidLow) { this.brakeFluidLow = brakeFluidLow.booleanValue(); } /** * Converts the cluster object to a string<br> * <pre> Ambient temperature: 0 C<br> Fuel consumed since last ignition: 0 L * Fuel level: 0 L<br> Odometer: 0 km<br> Oil pressure: 0 kPaG * Vehicle speed: 0 km/h<br> Turn signal position: OFF<br> Outside Air Temperature: 0 C</pre> * @return Cluster string */ @Override public String toString() { return "Ambient temperature: " + ambientTemp.toString() + "\nFuel consumed since last ignition:" + fuelConsumed.toString() + "\nFuel level: " + fuelLevel.toString() + "\nOdometer: " + odometer.toString() + "\nOil pressure: " + oilPressure.toString() + "\nVehicle speed: " + vehicleSpeed.toString() + "\nTurn signal position: " + turnSignalPosition.toString() + "\nOutside air temperature: " + outsideAirTemp.toString() + "\nBrake Fluid Low: " + brakeFluidLow; } } /** * <b>Diagnostic class</b><br> * Contains battery voltage and intake air temperature * * @see Volt * @see Celsius */ public class Diagnostic { private Volt batteryVoltage; private Celsius intakeAirTemp; private MetersPerSecondSquared vehicleAccelX, vehicleAccelY; /** * Initialize sensor data to defaults */ public Diagnostic() { batteryVoltage = new Volt(0); 
intakeAirTemp = new Celsius(0); vehicleAccelX = new MetersPerSecondSquared(0); vehicleAccelY = new MetersPerSecondSquared(0); } /** * Get battery voltage * @return battery voltage in volts */ public Volt getBatteryVoltage() { return batteryVoltage; } /** * Set battery voltage * @param batteryVoltage battery voltage in volts */ public void setBatteryVoltage(Volt batteryVoltage) { this.batteryVoltage = batteryVoltage; } /** * Get intake air temperature * @return intake air temperature in celsius */ public Celsius getIntakeAirTemp() { return intakeAirTemp; } /** * Set intake air temperature * @param intakeAirTemp intake air temperature in celsius */ public void setIntakeAirTemp(Celsius intakeAirTemp) { this.intakeAirTemp = intakeAirTemp; } /** * Get forward vehicle acceleration (x) * @return forward vehicle acceleration (x-direction) in meters per second squared */ public MetersPerSecondSquared getForwardAcceleration() { return vehicleAccelX; } /** * Set forward vehicle acceleration (x) * @param vehicleAccelX forward vehicle acceleration (x-direction) in meters per second squared */ public void setForwardAcceleration(MetersPerSecondSquared vehicleAccelX) { this.vehicleAccelX = vehicleAccelX; } /** * Get climb vehicle acceleration (y) * @return climb vehicle acceleration (y-direction) in meters per second squared */ public MetersPerSecondSquared getClimbAcceleration() { return vehicleAccelY; } /** * Set climb vehicle acceleration (y) * @param vehicleAccelY climb vehicle acceleration (y-direction) in meters per second squared */ public void setClimbAcceleration(MetersPerSecondSquared vehicleAccelY) { this.vehicleAccelY = vehicleAccelY; } /** * Converts the diagnostic object to a string<br> * <pre> Battery voltage: 0 V<br> Intake air temperature: 0 C</pre> * @return Transmission string */ @Override public String toString() { return "Battery voltage: " + batteryVoltage.toString() + "\nIntake air temperature: " + intakeAirTemp.toString() + "\nForward acceleration (x): " + 
vehicleAccelX.toString() + "\nClimb acceleration (y): " + vehicleAccelY.toString(); } } /** * <b>Lighting class</b><br> * Contains headlamp and high-beam statuses */ public class Lighting { private boolean headlamp, highBeam; /** * Initialize sensor data to defaults */ public Lighting() { } /** * Get headlamp status * @return headlamp status as a boolean */ public boolean getHeadlamp() { return headlamp; } /** * Set headlamp status * @param headlamp headlamp status as a boolean */ public void setHeadlamp(com.openxc.units.Boolean headlamp) { this.headlamp = headlamp.booleanValue(); } /** * Get high beam status * @return high beam status as a boolean */ public boolean getHighBeam() { return highBeam; } /** * Set high beam status * @param highBeam high beam status as a boolean */ public void setHighBeam(com.openxc.units.Boolean highBeam) { this.highBeam = highBeam.booleanValue(); } /** * Converts the lighting object to a string<br> * <pre> High beams on: false<br> Headlamps on: false</pre> * @return Transmission string */ @Override public String toString() { return "High beams on: " + highBeam + "\nHeadlamps on: " + headlamp; } } /** * <b>User Control class</b><br> * Contains accelerator pedal position, steering wheel angle, brake pedal, ignition, paddle shifter and parking brake statuses * * @see Degree * @see Percentage * @see BrakePedalPosition * @see IgnitionPosition * @see PaddleShifterPosition */ public class UserControl { private Percentage acceleratorPedalPosition; private Degree steeringWheelAngle; private BrakePedalPosition brakePedalPosition; private IgnitionPosition ignitionPosition; private boolean parkingBrakePosition; private PaddleShifterPosition paddleShifterPosition; /** * Initialize sensor data to defaults */ public UserControl() { acceleratorPedalPosition = new Percentage(0); steeringWheelAngle = new Degree(0); brakePedalPosition = BrakePedalPosition.SNA; ignitionPosition = IgnitionPosition.SNA; parkingBrakePosition = false; paddleShifterPosition = 
PaddleShifterPosition.SNA; } /** * Get accelerator pedal position * @return accelerator pedal position as a percentage (100% = pedal to the metal) */ public Percentage getAcceleratorPedalPosition() { return acceleratorPedalPosition; } /** * Set accelerator pedal position * @param acceleratorPedalPosition accelerator pedal position as a percentage (100% = pedal to the metal) */ public void setAcceleratorPedalPosition(Percentage acceleratorPedalPosition) { this.acceleratorPedalPosition = acceleratorPedalPosition; } /** * Set steering wheel angle * @return steering wheel angle in degrees (0 degrees=wheels straight, degree<0 = turning left, degree>0 = turning right) */ public Degree getSteeringWheelAngle() { return steeringWheelAngle; } /** * Set steering wheel angle * @param steeringWheelAngle steering wheel angle in degrees (0 degrees : wheels straight, degree=- : turning left, degree=+ : turning right) */ public void setSteeringWheelAngle(Degree steeringWheelAngle) { this.steeringWheelAngle = steeringWheelAngle; } /** * Get brake pedal position * @return brake pedal position */ public BrakePedalPosition getBrakePedalPosition() { return brakePedalPosition; } /** * Set brake pedal position * @param brakePedalPosition brake pedal position */ public void setBrakePedalPosition(BrakePedalPosition brakePedalPosition) { this.brakePedalPosition = brakePedalPosition; } /** * Get ignition position * @return ignition position */ public IgnitionPosition getIgnitionPosition() { return ignitionPosition; } /** * Set ignition position * @param ignitionPosition ignition position */ public void setIgnitionPosition(IgnitionPosition ignitionPosition) { this.ignitionPosition = ignitionPosition; } /** * Get parking brake status * @return parking brake status */ public boolean isParkingBrakePosition() { return parkingBrakePosition; } /** * Set parking brake status * @param parkingBrakePosition parking brake status */ public void setParkingBrakePosition(com.openxc.units.Boolean 
parkingBrakePosition) { this.parkingBrakePosition = parkingBrakePosition.booleanValue(); } /** * Get paddle shifter positions * @return paddle shifter position */ public PaddleShifterPosition getPaddleShifterPosition() { return paddleShifterPosition; } /** * Set paddle shifter positions * @param paddleShifterPosition paddle shifter position */ public void setPaddleShifterPosition(PaddleShifterPosition paddleShifterPosition) { this.paddleShifterPosition = paddleShifterPosition; } /** * Converts the user control object to a string<br> * <pre> Accelerator Pedal: 0 %<br> Brake Pedal: SNA * Ignition Status: SNA<br> Paddle Shifter Status: SNA<br> Parking brake status: false * Steering wheel angle: 0 degrees</pre> * @return Cluster string */ @Override public String toString() { return "Accelerator Pedal: " + acceleratorPedalPosition.toString() + "\nBrake Pedal: " + brakePedalPosition.toString() + "\nIgnition Status: " + ignitionPosition.toString() + "\nPaddle Shifter Status: " + paddleShifterPosition.toString() + "\nParking brake status: " + parkingBrakePosition + "\nSteering Wheel Angle: " + steeringWheelAngle.toString(); } } }
package com.hazelcast.internal.serialization.impl; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.QuickTest; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @RunWith(HazelcastParallelClassRunner.class) @Category(QuickTest.class) public class PortablePathCursorTest { @Rule public ExpectedException expected = ExpectedException.none(); @Test public void nonInitialised_throwsException() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); // THEN expected.expect(NullPointerException.class); cursor.advanceToNextToken(); } @Test public void initialised_reset_throwsException() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine.oil"); // WHEN cursor.reset(); // THEN expected.expect(NullPointerException.class); cursor.advanceToNextToken(); } @Test public void oneElementToken() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine"); // THEN assertEquals("engine", cursor.token()); assertEquals("engine", cursor.path()); assertTrue(cursor.isLastToken()); assertFalse(cursor.isAnyPath()); assertFalse(cursor.advanceToNextToken()); assertFalse(cursor.advanceToNextToken()); assertEquals("engine", cursor.token()); } @Test public void multiElementToken_iterationOverAll() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine.turbocharger.nozzle"); // THEN - generic options assertFalse(cursor.isAnyPath()); assertEquals("engine.turbocharger.nozzle", cursor.path()); // THEN - first token assertEquals("engine", cursor.token()); assertFalse(cursor.isLastToken()); // THEN - second token assertTrue(cursor.advanceToNextToken()); 
assertEquals("turbocharger", cursor.token()); assertFalse(cursor.isLastToken()); // THEN - third token assertTrue(cursor.advanceToNextToken()); assertEquals("nozzle", cursor.token()); assertTrue(cursor.isLastToken()); // THEN - no other token assertFalse(cursor.advanceToNextToken()); assertEquals("nozzle", cursor.token()); } @Test public void multiElementToken_anyPath() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine.turbocharger[any].nozzle"); // THEN assertTrue(cursor.isAnyPath()); } @Test public void multiElementToken_jumpingToIndex() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine.turbocharger.nozzle"); // THEN - 2nd token cursor.index(2); assertEquals("nozzle", cursor.token()); assertTrue(cursor.isLastToken()); assertEquals(2, cursor.index()); assertFalse(cursor.advanceToNextToken()); // THEN - 1st token cursor.index(0); assertEquals("engine", cursor.token()); assertFalse(cursor.isLastToken()); assertEquals(0, cursor.index()); assertTrue(cursor.advanceToNextToken()); // THEN - 3rd token cursor.index(1); assertEquals("turbocharger", cursor.token()); assertFalse(cursor.isLastToken()); assertEquals(1, cursor.index()); assertTrue(cursor.advanceToNextToken()); } @Test public void multiElementToken_jumpingToIndexOutOfBound() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("engine.turbocharger.nozzle"); // WHEN expected.expect(IndexOutOfBoundsException.class); cursor.index(3); // THEN cursor.token(); } @Test public void reuseOfCursor() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); // THEN cursor.init("engine.turbocharger.nozzle"); assertFalse(cursor.isAnyPath()); assertEquals("engine", cursor.token()); cursor.init("person.brain[any]"); assertTrue(cursor.isAnyPath()); assertEquals("person", cursor.token()); } @Test public void emptyPath() { PortablePathCursor cursor = new PortablePathCursor(); expected.expect(IllegalArgumentException.class); 
cursor.init(""); } @Test public void nullPath() { PortablePathCursor cursor = new PortablePathCursor(); expected.expect(IllegalArgumentException.class); cursor.init(null); } @Test public void wrongPath_dotOnly() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); expected.expect(IllegalArgumentException.class); cursor.init("."); // THEN assertEquals("", cursor.token()); assertEquals("", cursor.path()); assertTrue(cursor.isLastToken()); assertFalse(cursor.isAnyPath()); assertFalse(cursor.advanceToNextToken()); } @Test public void wrongPath_moreDots() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); expected.expect(IllegalArgumentException.class); cursor.init("..."); } @Test public void wrongPath_emptyTokens() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("a.."); // THEN - 1st token assertEquals("a", cursor.token()); assertTrue(cursor.advanceToNextToken()); // THEN - 2nd token assertTokenThrowsException(cursor); assertTrue(cursor.advanceToNextToken()); // THEN - 3rd token assertTokenThrowsException(cursor); assertFalse(cursor.advanceToNextToken()); } @Test public void wrongPath_pathEndingWithDot() { // GIVEN PortablePathCursor cursor = new PortablePathCursor(); cursor.init("a.b."); // THEN - 1st token assertEquals("a", cursor.token()); assertTrue(cursor.advanceToNextToken()); // THEN - 2nd token assertEquals("b", cursor.token()); assertTrue(cursor.advanceToNextToken()); // THEN - 3rd token assertTokenThrowsException(cursor); assertFalse(cursor.advanceToNextToken()); } private static void assertTokenThrowsException(PortablePathCursor cursor) { try { assertEquals("", cursor.token()); fail(); } catch (IllegalArgumentException ex) { } } }
package org.elkoserver.foundation.net;

import java.util.HashMap;
import java.util.Map;

import org.elkoserver.foundation.boot.BootProperties;
import org.elkoserver.util.trace.Trace;

/**
 * Message handler factory to provide message handlers that wrap a message
 * stream inside a series of HTTP requests.
 *
 * The challenge is that HTTP can't be relied on to hold open a single TCP
 * connection continuously, even though that's the desired abstraction.
 * Instead, a series of potentially short-lived HTTP over TCP connections need
 * to be turned into a single seamless message stream.  The correlation between
 * HTTP requests and their associated message connections is done via swiss
 * numbers in the URLs.  That job is done by HTTPMessageHandler objects, which
 * are dispensed here, and their associated HTTPSessionConnection objects.
 */
class HTTPMessageHandlerFactory implements MessageHandlerFactory {
    /** The message handler factory for the messages embedded in the
        composite stream. */
    private MessageHandlerFactory myInnerFactory;

    /** HTTP framer to interpret HTTP POSTs and format HTTP replies. */
    private HTTPFramer myHTTPFramer;

    /** The root URI for GETs and POSTs, normalized to "/rootURI/" form by the
        constructor. */
    private String myRootURI;

    /** Table of current sessions, indexed by ID number. */
    private Map<Long, HTTPSessionConnection> mySessions;

    /** Table of current sessions, indexed by TCP connection.  A session may
        be reached by a series of different TCP connections over its lifetime;
        this map always reflects the most recent association. */
    private Map<Connection, HTTPSessionConnection> mySessionsByConnection;

    /** Network manager for this server */
    private NetworkManager myManager;

    /** Time an HTTP select request can wait before it must be responded to,
        in milliseconds. */
    private int mySelectTimeout;

    /** Like mySelectTimeout, but when connection is in debug mode. */
    private int myDebugSelectTimeout;

    /** Default select timeout if none is explicitly given, in seconds. */
    static final int DEFAULT_SELECT_TIMEOUT = 60;

    /** Time an HTTP session can sit idle before being killed, in
        milliseconds. */
    private int mySessionTimeout;

    /** Like mySessionTimeout, but when connection is in debug mode. */
    private int myDebugSessionTimeout;

    /** Default session timeout if none is explicitly given, in seconds. */
    static final int DEFAULT_SESSION_TIMEOUT = 15;

    /**
     * Each HTTP message handler wraps an application-level message handler,
     * which is the entity that will actually process the messages extracted
     * from the HTTP requests, so the HTTP message handler factory needs to
     * wrap the application-level message handler factory.
     *
     * Timeouts are read from the boot properties in seconds and converted to
     * milliseconds here.
     *
     * @param innerFactory  The application-level message handler factory that
     *    is to be wrapped by this.
     * @param rootURI  The root URI for GETs and POSTs.
     * @param httpFramer  HTTP framer to interpret HTTP POSTs and format HTTP
     *    replies.
     * @param manager  Network manager for this server.
     */
    HTTPMessageHandlerFactory(MessageHandlerFactory innerFactory,
                              String rootURI, HTTPFramer httpFramer,
                              NetworkManager manager)
    {
        myInnerFactory = innerFactory;
        myRootURI = "/" + rootURI + "/";
        mySessions = new HashMap<Long, HTTPSessionConnection>();
        mySessionsByConnection =
            new HashMap<Connection, HTTPSessionConnection>();
        myHTTPFramer = httpFramer;
        myManager = manager;

        BootProperties props = manager.props();

        /* Property values are in seconds; internal bookkeeping is in ms. */
        mySelectTimeout =
            props.intProperty("conf.comm.httpselectwait",
                              DEFAULT_SELECT_TIMEOUT) * 1000;
        myDebugSelectTimeout =
            props.intProperty("conf.comm.httpselectwait.debug",
                              DEFAULT_SELECT_TIMEOUT) * 1000;
        mySessionTimeout =
            props.intProperty("conf.comm.httptimeout",
                              DEFAULT_SESSION_TIMEOUT) * 1000;
        myDebugSessionTimeout =
            props.intProperty("conf.comm.httptimeout.debug",
                              DEFAULT_SESSION_TIMEOUT) * 1000;
    }

    /**
     * Add a session to the session table.
     *
     * @param session  The session to add.
     */
    void addSession(HTTPSessionConnection session) {
        mySessions.put(session.sessionID(), session);
    }

    /**
     * Record the association of a TCP connection with an HTTP session.
     *
     * If the TCP connection was previously serving some other session, that
     * older association is dissolved first, so that each TCP connection maps
     * to at most one session.
     *
     * @param session  The session.
     * @param connection  The connection to associate with the session.
     */
    private void associateTCPConnection(HTTPSessionConnection session,
                                        Connection connection)
    {
        HTTPSessionConnection knownSession =
            mySessionsByConnection.get(connection);
        if (knownSession != null) {
            knownSession.dissociateTCPConnection(connection);
        }
        mySessionsByConnection.put(connection, session);
        session.associateTCPConnection(connection);
    }

    /**
     * Handle an HTTP GET of a /connect/ URI, causing the creation of a new
     * session.
     *
     * @param connection  The TCP connection upon which the connection request
     *    was received.
     * @param uri  HTTP GET URI fields (unused here; present for signature
     *    symmetry with the other do* handlers).
     *
     * @return true if an HTTP reply was sent.
     */
    private boolean doConnect(Connection connection, SessionURI uri) {
        /* The new session registers itself with this factory via its
           constructor (it is reachable through mySessions afterwards). */
        HTTPSessionConnection session = new HTTPSessionConnection(this);
        associateTCPConnection(session, connection);
        if (Trace.comm.event && Trace.ON) {
            Trace.comm.eventm(session + " connect over " + connection);
        }
        String reply = myHTTPFramer.makeConnectReply(session.sessionID());
        connection.sendMsg(reply);
        return true;
    }

    /**
     * Handle an HTTP GET of a /disconnect/ URI, causing the explicit
     * termination of an HTTP session by the browser.
     *
     * The reply is sent before the session is closed, so the client sees an
     * orderly acknowledgement rather than a dropped connection.
     *
     * @param connection  The TCP connection upon which the disconnect request
     *    was received.
     * @param uri  HTTP GET URI fields.
     *
     * @return true if an HTTP reply was sent.
     */
    private boolean doDisconnect(Connection connection, SessionURI uri) {
        HTTPSessionConnection session = lookupSessionFromURI(connection, uri);
        if (session != null) {
            associateTCPConnection(session, connection);
            session.noteClientActivity();
        }
        String reply;
        if (session == null) {
            Trace.comm.errorm("got disconnect with invalid session " +
                              uri.sessionID);
            reply = myHTTPFramer.makeSequenceErrorReply("sessionIDError");
        } else {
            reply = myHTTPFramer.makeDisconnectReply();
        }
        connection.sendMsg(reply);
        if (session != null) {
            session.close();
        }
        return true;
    }

    /**
     * Handle an HTTP GET or POST of a bad URI, replying with a 404.
     *
     * @param connection  The TCP connection upon which the bad URI
     *    request was received.
     * @param uri  The bad URI that was requested.
     *
     * @return true if an HTTP reply was sent.
     */
    private boolean doError(Connection connection, String uri) {
        if (Trace.comm.usage && Trace.ON) {
            Trace.comm.usagem(connection +
                " received invalid URI in HTTP request " + uri);
        }
        connection.sendMsg(new HTTPError(404, "Not Found",
                                         myHTTPFramer.makeBadURLReply(uri)));
        return true;
    }

    /**
     * Handle an HTTP GET of a /select/ URI, requesting the delivery of
     * messages from the server to the client.
     *
     * Note that unlike the other handlers this may return false: when the
     * session has no messages ready, the reply is deferred (long poll) and
     * the TCP connection must be held open.
     *
     * @param connection  The TCP connection upon which the select request was
     *    received.
     * @param uri  HTTP GET URI fields.
     * @param nonPersistent  True if this request was flagged non-persistent.
     *
     * @return true if an HTTP reply was sent.
     */
    private boolean doSelect(Connection connection, SessionURI uri,
                             boolean nonPersistent)
    {
        HTTPSessionConnection session = lookupSessionFromURI(connection, uri);
        if (session != null) {
            associateTCPConnection(session, connection);
            return session.selectMessages(connection, uri, nonPersistent);
        } else {
            Trace.comm.errorm("got select with invalid session " +
                              uri.sessionID);
            connection.sendMsg(
                myHTTPFramer.makeSequenceErrorReply("sessionIDError"));
            return true;
        }
    }

    /**
     * Handle an HTTP GET or POST of an /xmit/ URI, transmitting messages from
     * the client to the server.
     *
     * @param connection  The TCP connection upon which the message(s)
     *    was(were) delivered.
     * @param uri  HTTP GET or POST URI fields.
     * @param message  The body of the message(s) sent from the client.
     *
     * @return true if an HTTP reply was sent.
     */
    private boolean doXmit(Connection connection, SessionURI uri,
                           String message)
    {
        HTTPSessionConnection session = lookupSessionFromURI(connection, uri);
        if (session != null) {
            associateTCPConnection(session, connection);
            session.receiveMessage(connection, uri, message);
        } else {
            Trace.comm.errorm("got xmit with invalid session " +
                              uri.sessionID);
            connection.sendMsg(
                myHTTPFramer.makeSequenceErrorReply("sessionIDError"));
        }
        return true;
    }

    /**
     * Look up the session associated with some session ID.
     *
     * @param sessionID  The ID number of the session sought.
     *
     * @return the session whose session ID is 'sessionID', or null if there
     *    is no such session.
     */
    private HTTPSessionConnection getSession(long sessionID) {
        return mySessions.get(sessionID);
    }

    /**
     * Process an HTTP GET request, which (depending on the URI) may be
     * variously a request to connect a new session, to poll for server to
     * client messages for a session, a delivery of client to server messages
     * for a session, or to a request to disconnect a session.
     *
     * @param connection  The TCP connection on which the HTTP request was
     *    received.
     * @param uri  The URI that was requested.
     * @param nonPersistent  True if this request was flagged non-persistent.
     */
    void handleGET(Connection connection, String uri, boolean nonPersistent) {
        SessionURI parsed = new SessionURI(uri, myRootURI);
        boolean replied;

        if (!parsed.valid) {
            replied = doError(connection, uri);
        } else if (parsed.verb == SessionURI.VERB_CONNECT) {
            replied = doConnect(connection, parsed);
        } else if (parsed.verb == SessionURI.VERB_SELECT) {
            replied = doSelect(connection, parsed, nonPersistent);
        } else if (parsed.verb == SessionURI.VERB_DISCONNECT) {
            replied = doDisconnect(connection, parsed);
        } else {
            replied = doError(connection, uri);
        }

        /* Non-persistent requests close as soon as a reply has gone out; a
           deferred select (replied == false) keeps the connection open. */
        if (replied && nonPersistent) {
            connection.close();
        }
    }

    /**
     * Process an HTTP OPTIONS request, used in the braindamaged, useless, but
     * seemingly inescapable request preflight handshake required by the CORS
     * standard for cross site request handling.
     *
     * @param connection  The TCP connection on which the HTTP request was
     *    received.
     * @param request  The HTTP request itself, from which we will extract
     *    header information.
     */
    void handleOPTIONS(Connection connection, HTTPRequest request) {
        if (Trace.comm.event && Trace.ON) {
            Trace.comm.eventm("OPTIONS request over " + connection);
        }
        HTTPOptionsReply reply = new HTTPOptionsReply(request);
        connection.sendMsg(reply);
    }

    /**
     * Process an HTTP POST request, delivering messages for a session.
     *
     * @param connection  The TCP connection on which the HTTP request was
     *    received.
     * @param uri  The URI that was posted.
     * @param nonPersistent  True if this request was flagged non-persistent.
     * @param message  The message body.
     */
    void handlePOST(Connection connection, String uri, boolean nonPersistent,
                    String message)
    {
        SessionURI parsed = new SessionURI(uri, myRootURI);
        boolean replied;

        if (!parsed.valid) {
            replied = doError(connection, uri);
        } else if (parsed.verb == SessionURI.VERB_SELECT) {
            replied = doSelect(connection, parsed, nonPersistent);
        } else if (parsed.verb == SessionURI.VERB_XMIT_POST) {
            replied = doXmit(connection, parsed, message);
        } else if (parsed.verb == SessionURI.VERB_CONNECT) {
            replied = doConnect(connection, parsed);
        } else if (parsed.verb == SessionURI.VERB_DISCONNECT) {
            replied = doDisconnect(connection, parsed);
        } else {
            replied = doError(connection, uri);
        }

        /* See handleGET(): keep the connection open while a select reply is
           pending. */
        if (replied && nonPersistent) {
            connection.close();
        }
    }

    /**
     * Get the HTTP framer for this factory.
     *
     * @return this factory's HTTP framer object.
     */
    HTTPFramer httpFramer() {
        return myHTTPFramer;
    }

    /**
     * Obtain the inner message handler factory for this factory.  This is the
     * factory for providing message handlers for the messages embedded inside
     * the HTTP requests whose handlers are in turn provided by this (outer)
     * factory.
     *
     * @return the inner message handler factory for this factory.
     */
    MessageHandlerFactory innerFactory() {
        return myInnerFactory;
    }

    /**
     * Determine the HTTP session object associated with a requested URI.
     *
     * @param connection  The connection that referenced the URI.
     * @param uri  The URI uri describing the session of interest.
     *
     * @return the HTTP session corresponding to the session ID in 'uri', or
     *    null if there was no such session.
     */
    private HTTPSessionConnection lookupSessionFromURI(Connection connection,
                                                       SessionURI uri)
    {
        HTTPSessionConnection session = getSession(uri.sessionID);
        if (session != null) {
            return session;
        }
        if (Trace.comm.usage && Trace.ON) {
            Trace.comm.usagem(connection +
                " received invalid session ID " + uri.sessionID);
        }
        return null;
    }

    /**
     * Get the network manager for this factory.
     *
     * @return this factory's network manager object.
     */
    NetworkManager networkManager() {
        return myManager;
    }

    /**
     * Provide a message handler for a new (HTTP over TCP) connection.
     *
     * @param connection  The TCP connection object that was just created.
     */
    public MessageHandler provideMessageHandler(Connection connection) {
        return new HTTPMessageHandler(connection, this,
                                      sessionTimeout(false));
    }

    /**
     * Remove a session from the session table.
     *
     * @param session  The session to remove.
     */
    void removeSession(HTTPSessionConnection session) {
        mySessions.remove(session.sessionID());
    }

    /**
     * Get the HTTP select timeout interval: the time an HTTP request for a
     * select URL can remain open with no message traffic before the server
     * must respond.
     *
     * @param debug  If true, return the debug-mode timeout; if false, return
     *    the normal use timeout.
     *
     * @return the select timeout interval, in milliseconds.
     */
    int selectTimeout(boolean debug) {
        if (debug) {
            return myDebugSelectTimeout;
        } else {
            return mySelectTimeout;
        }
    }

    /**
     * Get the HTTP session timeout interval: the time an HTTP session can be
     * idle before the server kills it.
     *
     * @param debug  If true, return the debug-mode timeout; if false, return
     *    the normal use timeout.
     *
     * @return the session timeout interval, in milliseconds.
     */
    int sessionTimeout(boolean debug) {
        if (debug) {
            return myDebugSessionTimeout;
        } else {
            return mySessionTimeout;
        }
    }

    /**
     * Receive notification that an underlying TCP connection has died.
     *
     * The session (if any) survives the loss of its TCP connection; only the
     * association between the two is dissolved here.
     *
     * @param connection  The TCP connection that died.
     * @param reason  The reason why.
     */
    void tcpConnectionDied(Connection connection, Throwable reason) {
        HTTPSessionConnection session =
            mySessionsByConnection.remove(connection);

        if (session != null) {
            session.dissociateTCPConnection(connection);
            if (Trace.comm.event && Trace.ON) {
                Trace.comm.eventm(connection + " lost under " + session +
                                  ": " + reason);
            }
        } else {
            if (Trace.comm.event && Trace.ON) {
                Trace.comm.eventm(connection +
                    " lost under no known HTTP session: " + reason);
            }
        }
    }
}
/* * Copyright (c) 2002, 2004, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.sun.inputmethods.internal.codepointim; import java.text.AttributedCharacterIterator; import java.util.Map; import java.awt.AWTEvent; import java.awt.Toolkit; import java.awt.Rectangle; import java.awt.event.InputMethodEvent; import java.awt.event.KeyEvent; import java.awt.font.TextAttribute; import java.awt.font.TextHitInfo; import java.awt.im.InputMethodHighlight; import java.awt.im.spi.InputMethod; import java.awt.im.spi.InputMethodContext; import java.io.IOException; import java.text.AttributedString; import java.util.Locale; /** * The Code Point Input Method is a simple input method that allows Unicode * characters to be entered using their code point or code unit values. See the * accompanying file README.txt for more information. * * @author Brian Beck */ public class CodePointInputMethod implements InputMethod { private static final int UNSET = 0; private static final int ESCAPE = 1; // \u0000 - \uFFFF private static final int SPECIAL_ESCAPE = 2; // \U000000 - \U10FFFF private static final int SURROGATE_PAIR = 3; // \uD800\uDC00 - \uDBFF\uDFFF private InputMethodContext context; private Locale locale; private StringBuffer buffer; private int insertionPoint; private int format = UNSET; public CodePointInputMethod() throws IOException { } /** * This is the input method's main routine. The composed text is stored * in buffer. */ public void dispatchEvent(AWTEvent event) { // This input method handles KeyEvent only. 
if (!(event instanceof KeyEvent)) { return; } KeyEvent e = (KeyEvent) event; int eventID = event.getID(); boolean notInCompositionMode = buffer.length() == 0; if (eventID == KeyEvent.KEY_PRESSED) { // If we are not in composition mode, pass through if (notInCompositionMode) { return; } switch (e.getKeyCode()) { case KeyEvent.VK_LEFT: moveCaretLeft(); break; case KeyEvent.VK_RIGHT: moveCaretRight(); break; } } else if (eventID == KeyEvent.KEY_TYPED) { char c = e.getKeyChar(); // If we are not in composition mode, wait a back slash if (notInCompositionMode) { // If the type character is not a back slash, pass through if (c != '\\') { return; } startComposition(); // Enter to composition mode } else { switch (c) { case ' ': // Exit from composition mode finishComposition(); break; case '\u007f': // Delete deleteCharacter(); break; case '\b': // BackSpace deletePreviousCharacter(); break; case '\u001b': // Escape cancelComposition(); break; case '\n': // Return case '\t': // Tab sendCommittedText(); break; default: composeUnicodeEscape(c); break; } } } else { // KeyEvent.KEY_RELEASED // If we are not in composition mode, pass through if (notInCompositionMode) { return; } } e.consume(); } private void composeUnicodeEscape(char c) { switch (buffer.length()) { case 1: // \\ waitEscapeCharacter(c); break; case 2: // \\u or \\U case 3: // \\ux or \\Ux case 4: // \\uxx or \\Uxx waitDigit(c); break; case 5: // \\uxxx or \\Uxxx if (format == SPECIAL_ESCAPE) { waitDigit(c); } else { waitDigit2(c); } break; case 6: // \\uxxxx or \\Uxxxx if (format == SPECIAL_ESCAPE) { waitDigit(c); } else if (format == SURROGATE_PAIR) { waitBackSlashOrLowSurrogate(c); } else { beep(); } break; case 7: // \\Uxxxxx // Only SPECIAL_ESCAPE format uses this state. // Since the second "\\u" of SURROGATE_PAIR format is inserted // automatically, users don't have to type these keys. 
waitDigit(c); break; case 8: // \\uxxxx\\u case 9: // \\uxxxx\\ux case 10: // \\uxxxx\\uxx case 11: // \\uxxxx\\uxxx if (format == SURROGATE_PAIR) { waitDigit(c); } else { beep(); } break; default: beep(); break; } } private void waitEscapeCharacter(char c) { if (c == 'u' || c == 'U') { buffer.append(c); insertionPoint++; sendComposedText(); format = (c == 'u') ? ESCAPE : SPECIAL_ESCAPE; } else { if (c != '\\') { buffer.append(c); insertionPoint++; } sendCommittedText(); } } private void waitDigit(char c) { if (Character.digit(c, 16) != -1) { buffer.insert(insertionPoint++, c); sendComposedText(); } else { beep(); } } private void waitDigit2(char c) { if (Character.digit(c, 16) != -1) { buffer.insert(insertionPoint++, c); char codePoint = (char)getCodePoint(buffer, 2, 5); if (Character.isHighSurrogate(codePoint)) { format = SURROGATE_PAIR; buffer.append("\\u"); insertionPoint = 8; } else { format = ESCAPE; } sendComposedText(); } else { beep(); } } private void waitBackSlashOrLowSurrogate(char c) { if (insertionPoint == 6) { if (c == '\\') { buffer.append(c); buffer.append('u'); insertionPoint = 8; sendComposedText(); } else if (Character.digit(c, 16) != -1) { buffer.append("\\u"); buffer.append(c); insertionPoint = 9; sendComposedText(); } else { beep(); } } else { beep(); } } /** * Send the composed text to the client. */ private void sendComposedText() { AttributedString as = new AttributedString(buffer.toString()); as.addAttribute(TextAttribute.INPUT_METHOD_HIGHLIGHT, InputMethodHighlight.SELECTED_RAW_TEXT_HIGHLIGHT); context.dispatchInputMethodEvent( InputMethodEvent.INPUT_METHOD_TEXT_CHANGED, as.getIterator(), 0, TextHitInfo.leading(insertionPoint), null); } /** * Send the committed text to the client. 
*/ private void sendCommittedText() { AttributedString as = new AttributedString(buffer.toString()); context.dispatchInputMethodEvent( InputMethodEvent.INPUT_METHOD_TEXT_CHANGED, as.getIterator(), buffer.length(), TextHitInfo.leading(insertionPoint), null); buffer.setLength(0); insertionPoint = 0; format = UNSET; } /** * Move the insertion point one position to the left in the composed text. * Do not let the caret move to the left of the "\\u" or "\\U". */ private void moveCaretLeft() { int len = buffer.length(); if (--insertionPoint < 2) { insertionPoint++; beep(); } else if (format == SURROGATE_PAIR && insertionPoint == 7) { insertionPoint = 8; beep(); } context.dispatchInputMethodEvent( InputMethodEvent.CARET_POSITION_CHANGED, null, 0, TextHitInfo.leading(insertionPoint), null); } /** * Move the insertion point one position to the right in the composed text. */ private void moveCaretRight() { int len = buffer.length(); if (++insertionPoint > len) { insertionPoint = len; beep(); } context.dispatchInputMethodEvent( InputMethodEvent.CARET_POSITION_CHANGED, null, 0, TextHitInfo.leading(insertionPoint), null); } /** * Delete the character preceding the insertion point in the composed text. * If the insertion point is not at the end of the composed text and the * preceding text is "\\u" or "\\U", ring the bell. */ private void deletePreviousCharacter() { if (insertionPoint == 2) { if (buffer.length() == 2) { cancelComposition(); } else { // Do not allow deletion of the leading "\\u" or "\\U" if there // are other digits in the composed text. beep(); } } else if (insertionPoint == 8) { if (buffer.length() == 8) { if (format == SURROGATE_PAIR) { buffer.deleteCharAt(--insertionPoint); } buffer.deleteCharAt(--insertionPoint); sendComposedText(); } else { // Do not allow deletion of the second "\\u" if there are other // digits in the composed text. 
beep(); } } else { buffer.deleteCharAt(--insertionPoint); if (buffer.length() == 0) { sendCommittedText(); } else { sendComposedText(); } } } /** * Delete the character following the insertion point in the composed text. * If the insertion point is at the end of the composed text, ring the bell. */ private void deleteCharacter() { if (insertionPoint < buffer.length()) { buffer.deleteCharAt(insertionPoint); sendComposedText(); } else { beep(); } } private void startComposition() { buffer.append('\\'); insertionPoint = 1; sendComposedText(); } private void cancelComposition() { buffer.setLength(0); insertionPoint = 0; sendCommittedText(); } private void finishComposition() { int len = buffer.length(); if (len == 6 && format != SPECIAL_ESCAPE) { char codePoint = (char)getCodePoint(buffer, 2, 5); if (Character.isValidCodePoint(codePoint) && codePoint != 0xFFFF) { buffer.setLength(0); buffer.append(codePoint); sendCommittedText(); return; } } else if (len == 8 && format == SPECIAL_ESCAPE) { int codePoint = getCodePoint(buffer, 2, 7); if (Character.isValidCodePoint(codePoint) && codePoint != 0xFFFF) { buffer.setLength(0); buffer.appendCodePoint(codePoint); sendCommittedText(); return; } } else if (len == 12 && format == SURROGATE_PAIR) { char[] codePoint = { (char)getCodePoint(buffer, 2, 5), (char)getCodePoint(buffer, 8, 11) }; if (Character.isHighSurrogate(codePoint[0]) && Character.isLowSurrogate(codePoint[1])) { buffer.setLength(0); buffer.append(codePoint); sendCommittedText(); return; } } beep(); } private int getCodePoint(StringBuffer sb, int from, int to) { int value = 0; for (int i = from; i <= to; i++) { value = (value<<4) + Character.digit(sb.charAt(i), 16); } return value; } private static void beep() { Toolkit.getDefaultToolkit().beep(); } public void activate() { if (buffer == null) { buffer = new StringBuffer(12); insertionPoint = 0; } } public void deactivate(boolean isTemporary) { if (!isTemporary) { buffer = null; } } public void dispose() { } public 
Object getControlObject() { return null; } public void endComposition() { sendCommittedText(); } public Locale getLocale() { return locale; } public void hideWindows() { } public boolean isCompositionEnabled() { // always enabled return true; } public void notifyClientWindowChange(Rectangle location) { } public void reconvert() { // not supported yet throw new UnsupportedOperationException(); } public void removeNotify() { } public void setCharacterSubsets(Character.Subset[] subsets) { } public void setCompositionEnabled(boolean enable) { // not supported yet throw new UnsupportedOperationException(); } public void setInputMethodContext(InputMethodContext context) { this.context = context; } /* * The Code Point Input Method supports all locales. */ public boolean setLocale(Locale locale) { this.locale = locale; return true; } }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.controls; import org.eclipse.jface.action.Action; import org.eclipse.jface.action.IContributionManager; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.viewers.*; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ControlAdapter; import org.eclipse.swt.events.ControlEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.graphics.Rectangle; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.widgets.*; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.model.DBIcon; import org.jkiss.dbeaver.ui.*; import org.jkiss.dbeaver.ui.dialogs.BaseDialog; import org.jkiss.dbeaver.ui.internal.UIMessages; import org.jkiss.dbeaver.ui.internal.UINavigatorMessages; import org.jkiss.dbeaver.utils.RuntimeUtils; import org.jkiss.utils.CommonUtils; import java.lang.reflect.Array; import java.text.Collator; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.List; /** * Tree/table viewer column controller */ public class ViewerColumnController<COLUMN, ELEMENT> { private static final Log log = Log.getLog(ViewerColumnController.class); private static final String DATA_KEY = ViewerColumnController.class.getSimpleName(); private static final int 
MIN_COLUMN_AUTO_WIDTH = 100; private final String configId; private final ColumnViewer viewer; private final List<ColumnInfo> columns = new ArrayList<>(); private boolean clickOnHeader; private boolean isPacking, isInitializing; private DBIcon defaultIcon; private boolean forceAutoSize; private transient ObjectViewerRenderer cellRenderer; private transient Listener menuListener; public static ViewerColumnController getFromControl(Control control) { return (ViewerColumnController)control.getData(DATA_KEY); } public ViewerColumnController(String id, ColumnViewer viewer) { this.configId = id; this.viewer = viewer; final Control control = this.viewer.getControl(); control.setData(DATA_KEY, this); if (control instanceof Tree || control instanceof Table) { menuListener = event -> { Point pt = control.getDisplay().map(null, control, new Point(event.x, event.y)); Rectangle clientArea = ((Composite) control).getClientArea(); if (RuntimeUtils.isMacOS()) { clickOnHeader = pt.y < 0; } else { if (control instanceof Tree) { clickOnHeader = clientArea.y <= pt.y && pt.y < (clientArea.y + ((Tree) control).getHeaderHeight()); } else { clickOnHeader = clientArea.y <= pt.y && pt.y < (clientArea.y + ((Table) control).getHeaderHeight()); } } }; control.addListener(SWT.MenuDetect, menuListener); } cellRenderer = new ObjectViewerRenderer(viewer, false) { @Nullable @Override public Object getCellValue(Object element, int columnIndex) { List<ColumnInfo> visibleColumns = getVisibleColumns(); if (!visibleColumns.isEmpty()) { ColumnInfo columnInfo = getVisibleColumns().get(columnIndex); if (columnInfo.labelProvider instanceof ColumnBooleanLabelProvider) { return ((ColumnBooleanLabelProvider) columnInfo.labelProvider).getValueProvider().getValue(element); } } return null; } }; } public void dispose() { clearColumns(); final Control control = this.viewer.getControl(); if (!control.isDisposed()) { if (menuListener != null) { control.removeListener(SWT.MenuDetect, menuListener); menuListener = 
null; } } }
// NOTE(review): the tokens above are the tail of a method whose start lies outside this chunk;
// everything below is the interior of the enclosing column-controller class (header also outside view).

// Whether the last viewer click landed on a column header (flag maintained elsewhere in this class).
public boolean isClickOnHeader() { return clickOnHeader; }

// Force auto-sizing of columns on repack.
public void setForceAutoSize(boolean forceAutoSize) { this.forceAutoSize = forceAutoSize; }

// Icon painted in the first column when no provider supplies one.
public void setDefaultIcon(DBIcon defaultIcon) { this.defaultIcon = defaultIcon; }

// Adds a "Configure columns" action to the given menu; running it opens the column config dialog.
public void fillConfigMenu(IContributionManager menuManager) {
    menuManager.add(new Action(UINavigatorMessages.obj_editor_properties_control_action_configure_columns, DBeaverIcons.getImageDescriptor(UIIcon.CONFIGURATION)) {
        {
            setDescription(UINavigatorMessages.obj_editor_properties_control_action_configure_columns_description);
        }
        @Override
        public void run() {
            configureColumns();
        }
    });
}

// Registers a text column backed by an IColumnTextProvider; the first column also gets the default icon.
public void addColumn(
    String name,
    String description,
    int style,
    boolean defaultVisible,
    boolean required,
    IColumnTextProvider<ELEMENT> labelProvider,
    EditingSupport editingSupport)
{
    addColumn(name, description, style, defaultVisible, required, false, null, new ColumnLabelProvider() {
        @Override
        public String getText(Object element) {
            return labelProvider.getText((ELEMENT) element);
        }

        @Override
        public void update(ViewerCell cell) {
            // Only the first column carries the (optional) default icon.
            if (cell.getColumnIndex() == 0) {
                if (defaultIcon != null) {
                    cell.setImage(DBeaverIcons.getImage(defaultIcon));
                }
            }
            cell.setText(labelProvider.getText((ELEMENT) cell.getElement()));
        }
    }, editingSupport);
}

// Registers a boolean (checkbox-style) column backed by a value provider.
public void addBooleanColumn(
    String name,
    String description,
    int style,
    boolean defaultVisible,
    boolean required,
    IColumnValueProvider<ELEMENT, Boolean> valueProvider,
    EditingSupport editingSupport)
{
    addColumn(name, description, style, defaultVisible, required, false, null,
        new ColumnBooleanLabelProvider<>(valueProvider), editingSupport);
}

// Registers a column with a plain CellLabelProvider and no editing support.
public void addColumn(String name, String description, int style, boolean defaultVisible, boolean required, CellLabelProvider labelProvider) {
    addColumn(name, description, style, defaultVisible, required, false, null, labelProvider, null);
}

// Full-parameter variant: records the column description; actual widgets are created in createVisibleColumns().
public void addColumn(
    String name,
    String description,
    int style,
    boolean defaultVisible,
    boolean required,
    boolean numeric,
    Object userData,
    CellLabelProvider labelProvider,
    EditingSupport editingSupport)
{
    columns.add(
        new ColumnInfo(
            name,
            description,
            style,
            defaultVisible,
            required,
            numeric,
            userData,
            labelProvider,
            editingSupport,
            columns.size()));
}

// Disposes all column widgets and forgets all column descriptions.
private void clearColumns() {
    for (ColumnInfo columnInfo : columns) {
        if (columnInfo.column != null) {
            columnInfo.column.dispose();
            columnInfo.column = null;
        }
    }
    columns.clear();
}

// Creates columns and packs them to content width.
public void createColumns() {
    this.createColumns(true);
}

// Loads persisted column configuration (visibility/order/width), then (re)creates the widgets.
public void createColumns(boolean pack) {
    try {
        readColumnsConfiguration();
    } catch (Exception e) {
        // Possibly incompatible format from previous version
        log.warn("Failed to load configuration for '" + this.configId + "'", e); //$NON-NLS-1$ //$NON-NLS-2$
    }
    recreateColumns(pack);
}

// Disposes existing column widgets and rebuilds the visible ones; optionally repacks widths.
// Redraw is suppressed and isInitializing set for the duration so resize/move listeners stay quiet.
private void recreateColumns(boolean pack) {
    final Control control = viewer.getControl();
    if (control == null || control.isDisposed()) {
        return;
    }
    control.setRedraw(false);
    isInitializing = true;
    try {
        boolean needRefresh = false;
        for (ColumnInfo columnInfo : columns) {
            boolean columnExists = (columnInfo.column != null);
            if (columnExists != columnInfo.visible) {
                // Visibility changed compared to the current widget set -> viewer refresh needed.
                needRefresh = true;
            }
            if (columnInfo.column != null) {
                columnInfo.column.dispose();
                columnInfo.column = null;
            }
        }
        createVisibleColumns();
        if (needRefresh) {
            viewer.refresh();
        }
        boolean allSized = isAllSized();
        if (pack && !allSized) {
            repackColumns();
            // Repack once more after the first real resize, when rows are actually present.
            control.addControlListener(new ControlAdapter() {
                @Override
                public void controlResized(ControlEvent e) {
                    control.removeControlListener(this);
                    if (getRowCount() > 0) {
                        repackColumns();
                    }
                }
            });
        }
    } finally {
        control.setRedraw(true);
        isInitializing = false;
    }
}

// True when every visible column already has a persisted positive width.
private boolean isAllSized() {
    for (ColumnInfo columnInfo : columns) {
        if (columnInfo.visible && columnInfo.width <= 0) {
            return false;
        }
    }
    return true;
}

// Auto-sizes columns to content; for a two-column tree a fixed 60/40 split is used.
// Skipped entirely when all widths are already persisted.
public void repackColumns() {
    if (isAllSized()) {
        return;
    }
    isPacking = true;
    try {
        int itemCount = 0;
        if (viewer instanceof TreeViewer) {
            itemCount = ((TreeViewer) viewer).getTree().getItemCount();
            float[] ratios = null;
            if (((TreeViewer) viewer).getTree().getColumnCount() == 2) {
                ratios = new float[]{0.6f, 0.4f};
            }
            UIUtils.packColumns(((TreeViewer) viewer).getTree(), forceAutoSize, ratios);
        } else if (viewer instanceof TableViewer) {
            itemCount = ((TableViewer) viewer).getTable().getItemCount();
            UIUtils.packColumns(((TableViewer)viewer).getTable(), forceAutoSize);
        }
        if (itemCount == 0) {
            // Fix too narrow width for empty lists
            for (ColumnInfo columnInfo : getVisibleColumns()) {
                if (columnInfo.column instanceof TreeColumn) {
                    if (((TreeColumn) columnInfo.column).getWidth() < MIN_COLUMN_AUTO_WIDTH) {
                        ((TreeColumn) columnInfo.column).setWidth(MIN_COLUMN_AUTO_WIDTH);
                        columnInfo.width = MIN_COLUMN_AUTO_WIDTH;
                    }
                } else if (columnInfo.column instanceof TableColumn) {
                    if (((TableColumn) columnInfo.column).getWidth() < MIN_COLUMN_AUTO_WIDTH) {
                        ((TableColumn) columnInfo.column).setWidth(MIN_COLUMN_AUTO_WIDTH);
                        columnInfo.width = MIN_COLUMN_AUTO_WIDTH;
                    }
                }
            }
        }
    } finally {
        isPacking = false;
    }
}

// Asynchronously auto-sizes all columns regardless of persisted widths.
public void autoSizeColumns() {
    UIUtils.asyncExec(() -> {
        Control control = this.viewer.getControl();
        if (control instanceof Tree) {
            UIUtils.packColumns((Tree) control, true, null);
        } else if (control instanceof Table) {
            UIUtils.packColumns((Table) control, true);
        }
    });
}

// Sorts the viewer by the column at the given model index; direction is SWT.UP or SWT.DOWN.
// NOTE(review): assumes the column at `index` has a sortListener (i.e. a plain ILabelProvider) — confirm callers.
public void sortByColumn(int index, int direction) {
    final ColumnInfo columnInfo = columns.get(index);
    columnInfo.sortListener.sortViewer(columnInfo.column, direction);
}

// Creates the SWT column widgets for all currently-visible ColumnInfos and wires
// resize/move persistence listeners, label/editing providers, and sort listeners.
private void createVisibleColumns() {
    boolean hasCustomDraw = false;
    List<ColumnInfo> visibleColumns = getVisibleColumns();
    for (int i = 0; i < visibleColumns.size(); i++) {
        final ColumnInfo columnInfo = visibleColumns.get(i);
        columnInfo.order = i;
        final Item colItem;
        ViewerColumn viewerColumn;
        if (viewer instanceof TreeViewer) {
            final TreeViewerColumn item = new TreeViewerColumn((TreeViewer) viewer, columnInfo.style);
            viewerColumn = item;
            final TreeColumn column = item.getColumn();
            colItem = column;
            column.setText(columnInfo.name);
            column.setMoveable(true);
            if (columnInfo.width > 0) {
                column.setWidth(columnInfo.width);
            }
            if (!CommonUtils.isEmpty(columnInfo.description)) {
                column.setToolTipText(columnInfo.description);
            }
            column.addControlListener(new ControlAdapter() {
                @Override
                public void controlResized(ControlEvent e) {
                    // Persist user-driven resizes only (not our own init/pack operations).
                    if (!isInitializing && !isPacking) {
                        columnInfo.width = column.getWidth();
                        if (getRowCount() > 0) {
                            saveColumnConfig();
                        }
                    }
                }
                @Override
                public void controlMoved(ControlEvent e) {
                    if (!isInitializing && e.getSource() instanceof TreeColumn) {
                        updateColumnOrder(column, column.getParent().getColumnOrder());
                    }
                }
            });
            columnInfo.column = column;
        } else if (viewer instanceof TableViewer) {
            final TableViewerColumn item = new TableViewerColumn((TableViewer) viewer, columnInfo.style);
            viewerColumn = item;
            final TableColumn column = item.getColumn();
            colItem = column;
            column.setText(columnInfo.name);
            column.setMoveable(true);
            column.setWidth(columnInfo.width);
            if (!CommonUtils.isEmpty(columnInfo.description)) {
                column.setToolTipText(columnInfo.description);
            }
            column.addControlListener(new ControlAdapter() {
                @Override
                public void controlResized(ControlEvent e) {
                    // NOTE(review): unlike the tree branch, this does not check isInitializing/isPacking — confirm intended.
                    columnInfo.width = column.getWidth();
                    if (getRowCount() > 0) {
                        saveColumnConfig();
                    }
                }
                @Override
                public void controlMoved(ControlEvent e) {
                    if (!isInitializing && e.getSource() instanceof TableColumn) {
                        updateColumnOrder(column, column.getParent().getColumnOrder());
                    }
                }
            });
            columnInfo.column = column;
        } else {
            // Neither tree nor table viewer: nothing to create for this column.
            continue;
        }
        viewerColumn.setLabelProvider(columnInfo.labelProvider);
        viewerColumn.setEditingSupport(columnInfo.editingSupport);
        colItem.setData(columnInfo);
        if (columnInfo.labelProvider instanceof ILazyLabelProvider ||
            columnInfo.labelProvider instanceof ColumnBooleanLabelProvider) {
            // Lazy/boolean providers paint via the SWT.PaintItem hook below instead of sorting.
            hasCustomDraw = true;
        } else if (columnInfo.labelProvider instanceof ILabelProvider) {
            columnInfo.sortListener = new SortListener(viewer, columnInfo);
            columnInfo.column.addListener(SWT.Selection, columnInfo.sortListener);
        }
    }
    if (hasCustomDraw) {
        // Fill lazy cell text on demand and custom-paint boolean cells during item painting.
        viewer.getControl().addListener(SWT.PaintItem, event -> {
            ColumnInfo columnInfo;
            if (viewer instanceof TreeViewer) {
                TreeColumn column = ((TreeViewer) viewer).getTree().getColumn(event.index);
                columnInfo = (ColumnInfo) column.getData();
                if (columnInfo.labelProvider instanceof ILazyLabelProvider &&
                    CommonUtils.isEmpty(((TreeItem) event.item).getText(event.index))) {
                    final String lazyText = ((ILazyLabelProvider) columnInfo.labelProvider).getLazyText(event.item.getData());
                    if (!CommonUtils.isEmpty(lazyText)) {
                        ((TreeItem) event.item).setText(event.index, lazyText);
                    }
                }
            } else {
                TableColumn column = ((TableViewer) viewer).getTable().getColumn(event.index);
                columnInfo = (ColumnInfo) column.getData();
                if (columnInfo.labelProvider instanceof ILazyLabelProvider &&
                    CommonUtils.isEmpty(((TableItem) event.item).getText(event.index))) {
                    final String lazyText = ((ILazyLabelProvider) columnInfo.labelProvider).getLazyText(event.item.getData());
                    if (!CommonUtils.isEmpty(lazyText)) {
                        ((TableItem) event.item).setText(event.index, lazyText);
                    }
                }
            }
            if (columnInfo.labelProvider instanceof ColumnBooleanLabelProvider<?, ?>) {
                Object element = event.item.getData();
                Object cellValue = ((ColumnBooleanLabelProvider) columnInfo.labelProvider).getValueProvider().getValue(element);
                cellRenderer.paintCell(event, element, cellValue, event.item, Boolean.class, event.index, true, (event.detail & SWT.SELECTED) == SWT.SELECTED);
            }
        });
    }
}

// Returns the visible columns sorted by their persisted order.
public List<ColumnInfo> getVisibleColumns() {
    List<ColumnInfo> visibleList = new ArrayList<>();
    for (ColumnInfo column : columns) {
        if (column.visible) {
            visibleList.add(column);
        }
    }
    visibleList.sort(new ColumnInfoComparator());
    return visibleList;
}

// Read config from dialog settings: applies persisted visibility/order/width to matching columns (matched by name).
// If no saved column is visible at all, the saved config is considered bogus and ignored.
private void readColumnsConfiguration() {
    final Collection<ViewerColumnRegistry.ColumnState> savedConfig = ViewerColumnRegistry.getInstance().getSavedConfig(configId);
    if (savedConfig == null || savedConfig.isEmpty()) {
        return;
    }
    boolean hasVisible = false;
    for (ViewerColumnRegistry.ColumnState savedState : savedConfig) {
        if (savedState.visible) {
            hasVisible = true;
            break;
        }
    }
    if (!hasVisible) {
        return;
    }
    for (ColumnInfo columnInfo : columns) {
        for (ViewerColumnRegistry.ColumnState savedState : savedConfig) {
            if (columnInfo.name.equals(savedState.name)) {
                columnInfo.visible = savedState.visible;
                columnInfo.order = savedState.order;
                columnInfo.width = savedState.width;
                break;
            }
        }
    }
}

// Returns the user data attached to the column at the given WIDGET index.
public COLUMN getColumnData(int columnIndex) {
    final Control control = viewer.getControl();
    ColumnInfo columnInfo;
    if (control instanceof Tree) {
        columnInfo = (ColumnInfo) ((Tree) control).getColumn(columnIndex).getData();
    } else {
        columnInfo = (ColumnInfo) ((Table) control).getColumn(columnIndex).getData();
    }
    return (COLUMN) columnInfo.userData;
}

// Returns user data for ALL columns (visible or not), in registration order.
public COLUMN[] getColumnsData(Class<COLUMN> type) {
    COLUMN[] newArray = (COLUMN[]) Array.newInstance(type, columns.size());
    for (int i = 0; i < columns.size(); i++) {
        newArray[i] = type.cast(columns.get(i).userData);
    }
    return newArray;
}

// Opens the column configuration dialog; persists the configuration if the user confirms.
// Returns true when the dialog was confirmed with OK.
public boolean configureColumns() {
    ConfigDialog configDialog = new ConfigDialog();
    if (configDialog.open() != IDialogConstants.OK_ID) {
        return false;
    }
    saveColumnConfig();
    return true;
}

// Syncs a ColumnInfo's logical order with the widget's new column order after a drag-move, then persists.
private void updateColumnOrder(Item column, int[] order) {
    if (isPacking) {
        return;
    }
    ColumnInfo columnInfo = (ColumnInfo) column.getData();
    boolean updated = false;
    for (int i = 0; i < order.length; i++) {
        if (order[i] == columnInfo.order) {
            columnInfo.order = i;
            updated = true;
            break;
        }
    }
    if (updated) {
        saveColumnConfig();
    }
}

// Persists the current column configuration to the registry.
// NOTE(review): the original comment here ("Save settings only if we have at least one rows. Otherwise")
// was truncated and the stated condition is not implemented — the config is saved unconditionally
// (callers guard with getRowCount() > 0 themselves). Confirm intent before changing.
private void saveColumnConfig() {
    ViewerColumnRegistry.getInstance().updateConfig(configId, columns);
}

// Number of column widgets currently created on the underlying tree/table.
public int getColumnsCount() {
    final Control control = viewer.getControl();
    return control instanceof Tree ?
        ((Tree) control).getColumnCount() :
        ((Table) control).getColumnCount();
}

// Number of top-level rows currently in the underlying tree/table.
public int getRowCount() {
    final Control control = viewer.getControl();
    return control instanceof Tree ?
        ((Tree) control).getItemCount() :
        ((Table) control).getItemCount();
}

// Order index of the first visible column with editing support, or -1 if none.
// NOTE(review): the `element` parameter is unused — presumably kept for interface/caller symmetry.
public int getEditableColumnIndex(Object element) {
    for (ColumnInfo info : getVisibleColumns()) {
        if (info.editingSupport != null) {
            return info.order;
        }
    }
    return -1;
}

// Immutable-ish description of a single column; name/visible/order/width are inherited persistable state.
private static class ColumnInfo extends ViewerColumnRegistry.ColumnState {
    final String description;
    final int style;
    final boolean defaultVisible;
    final boolean required;      // required columns cannot be hidden in the config dialog
    final boolean numeric;       // numeric columns sort by parsed number, not lexicographically
    final Object userData;
    final CellLabelProvider labelProvider;
    final EditingSupport editingSupport;
    Item column;                 // the live SWT widget; null while the column is hidden/not created
    SortListener sortListener;

    private ColumnInfo(String name, String description, int style, boolean defaultVisible, boolean required, boolean numeric, Object userData, CellLabelProvider labelProvider, EditingSupport editingSupport, int order) {
        this.name = name;
        this.description = description;
        this.style = style;
        this.defaultVisible = defaultVisible;
        this.required = required;
        this.numeric = numeric;
        this.userData = userData;
        this.visible = defaultVisible;
        this.labelProvider = labelProvider;
        this.editingSupport = editingSupport;
        this.order = order;
    }
}

// Modal dialog listing all columns with checkboxes to toggle visibility; required columns are grayed (locked on).
private class ConfigDialog extends BaseDialog {

    private Table colTable;

    //private final Map<ColumnInfo, Button> buttonMap = new HashMap<>();

    protected ConfigDialog() {
        super(viewer.getControl().getShell(), UINavigatorMessages.label_configure_columns, UIIcon.CONFIGURATION);
    }

    protected void setShellStyle(int newShellStyle) {
        // Disallow maximizing the dialog.
        super.setShellStyle(newShellStyle & ~SWT.MAX);
    }

    @Override
    protected boolean isResizable() {
        return true;
    }

    @Override
    protected Composite createDialogArea(Composite parent) {
        Composite composite = super.createDialogArea(parent);
        UIUtils.createControlLabel(composite, UINavigatorMessages.label_select_columns);
        List<ColumnInfo> orderedList = new ArrayList<>(columns);
        orderedList.sort(new ColumnInfoComparator());
        colTable = new Table(composite, SWT.BORDER | SWT.CHECK | SWT.H_SCROLL | SWT.V_SCROLL);
        colTable.setLayoutData(new GridData(GridData.FILL_BOTH));
        colTable.setLinesVisible(true);
        colTable.addListener(SWT.Selection, event -> {
            if( event.detail == SWT.CHECK ) {
                // Grayed (= required) items must stay checked.
                if (((TableItem)event.item).getGrayed()) {
                    ((TableItem)event.item).setChecked(true);
                    event.doit = false;
                }
            }
        });
        final TableColumn nameColumn = new TableColumn(colTable, SWT.LEFT);
        nameColumn.setText(UINavigatorMessages.label_name);
        final TableColumn descColumn = new TableColumn(colTable, SWT.LEFT);
        descColumn.setText(UINavigatorMessages.label_description);
        for (ColumnInfo columnInfo : orderedList) {
            TableItem colItem = new TableItem(colTable, SWT.NONE);
            colItem.setData(columnInfo);
            colItem.setText(0, columnInfo.name);
            if (!CommonUtils.isEmpty(columnInfo.description)) {
                colItem.setText(1, columnInfo.description);
            }
            colItem.setChecked(columnInfo.visible);
            if (columnInfo.required) {
                colItem.setGrayed(true);
            }
        }
        // Cap auto-packed widths so very long names/descriptions don't blow up the dialog.
        nameColumn.pack();
        if (nameColumn.getWidth() > 300) {
            nameColumn.setWidth(300);
        }
        descColumn.pack();
        if (descColumn.getWidth() > 400) {
            descColumn.setWidth(400);
        }
        // NOTE(review): returns `parent` rather than `composite` — JFace convention is to return the created area; confirm.
        return parent;
    }

    @Override
    protected void okPressed() {
        boolean recreateColumns = false;
        for (TableItem item : colTable.getItems()) {
            ColumnInfo ci = (ColumnInfo) item.getData();
            if (item.getChecked() != ci.visible) {
                ci.visible = item.getChecked();
                recreateColumns = true;
            }
        }
        if (recreateColumns) {
            recreateColumns(true);
        }
        super.okPressed();
    }

    @Override
    protected void createButtonsForButtonBar(Composite parent) {
        // Reuses DETAILS_ID for the "reset to defaults" button.
        createButton(parent, IDialogConstants.DETAILS_ID, UIMessages.button_reset_to_defaults, false); //$NON-NLS-1$
        super.createButtonsForButtonBar(parent);
    }

    @Override
    protected void buttonPressed(int buttonId) {
        if (buttonId == IDialogConstants.DETAILS_ID) {
            resetToDefaults();
        }
        super.buttonPressed(buttonId);
    }

    // Restores each checkbox to the column's default visibility (applied only on OK).
    private void resetToDefaults() {
        for (TableItem item : colTable.getItems()) {
            ColumnInfo ci = (ColumnInfo) item.getData();
            item.setChecked(ci.defaultVisible);
        }
    }
}

// Orders columns by their persisted `order` field.
private static class ColumnInfoComparator implements Comparator<ColumnInfo> {
    @Override
    public int compare(ColumnInfo o1, ColumnInfo o2) {
        return o1.order - o2.order;
    }
}

// Column-header click handler: toggles sort direction on repeated clicks and installs
// a ViewerComparator that compares cell label text (numerically for numeric columns).
private static class SortListener implements Listener {
    ColumnViewer viewer;
    ColumnInfo columnInfo;
    int sortDirection = SWT.UP;
    Item prevColumn = null;

    public SortListener(ColumnViewer viewer, ColumnInfo columnInfo) {
        this.viewer = viewer;
        this.columnInfo = columnInfo;
    }

    @Override
    public void handleEvent(Event e) {
        Item column = (Item)e.widget;
        if (prevColumn == column) {
            // Set reverse order
            sortDirection = sortDirection == SWT.UP ? SWT.DOWN : SWT.UP;
        }
        prevColumn = column;
        sortViewer(column, sortDirection);
    }

    private void sortViewer(final Item column, final int sortDirection) {
        Collator collator = Collator.getInstance();
        if (viewer instanceof TreeViewer) {
            ((TreeViewer)viewer).getTree().setSortColumn((TreeColumn) column);
            ((TreeViewer)viewer).getTree().setSortDirection(sortDirection);
        } else {
            ((TableViewer)viewer).getTable().setSortColumn((TableColumn) column);
            ((TableViewer)viewer).getTable().setSortDirection(sortDirection);
        }
        final ILabelProvider labelProvider = (ILabelProvider)columnInfo.labelProvider;
        // ILabelProviderEx allows fetching the raw (uncolored/unformatted) text for comparison.
        final ILabelProviderEx exLabelProvider = labelProvider instanceof ILabelProviderEx ?
            (ILabelProviderEx)labelProvider : null;
        viewer.setComparator(new ViewerComparator(collator) {
            private final NumberFormat numberFormat = NumberFormat.getInstance();
            @Override
            public int compare(Viewer v, Object e1, Object e2) {
                int result;
                String value1;
                String value2;
                if (exLabelProvider != null) {
                    value1 = exLabelProvider.getText(e1, false);
                    value2 = exLabelProvider.getText(e2, false);
                } else {
                    value1 = labelProvider.getText(e1);
                    value2 = labelProvider.getText(e2);
                }
                // Nulls sort first.
                if (value1 == null && value2 == null) {
                    result = 0;
                } else if (value1 == null) {
                    result = -1;
                } else if (value2 == null) {
                    result = 1;
                } else {
                    if (columnInfo.numeric) {
                        try {
                            final Number num1 = numberFormat.parse(value1);
                            final Number num2 = numberFormat.parse(value2);
                            if (num1.getClass() == num2.getClass() && num1 instanceof Comparable) {
                                result = ((Comparable) num1).compareTo(num2);
                            } else {
                                // Dunno how to compare
                                result = 0;
                            }
                        } catch (Exception e) {
                            // not numbers
                            result = value1.compareToIgnoreCase(value2);
                        }
                    } else {
                        result = value1.compareToIgnoreCase(value2);
                    }
                }
                return sortDirection == SWT.UP ? result : -result;
            }
        });
    }
}
}
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.core.env;

import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.springframework.util.StringUtils;

/**
 * Default implementation of the {@link PropertySources} interface.
 * Allows manipulation of contained property sources and provides a constructor
 * for copying an existing {@code PropertySources} instance.
 *
 * <p>Where <em>precedence</em> is mentioned in methods such as {@link #addFirst}
 * and {@link #addLast}, this is with regard to the order in which property sources
 * will be searched when resolving a given property with a {@link PropertyResolver}.
 *
 * @author Chris Beams
 * @author Juergen Hoeller
 * @since 3.1
 * @see PropertySourcesPropertyResolver
 */
public class MutablePropertySources implements PropertySources {

	private final Log logger;

	// CopyOnWriteArrayList: safe concurrent iteration; mutations copy the backing array.
	private final List<PropertySource<?>> propertySourceList = new CopyOnWriteArrayList<>();


	/**
	 * Create a new {@link MutablePropertySources} object with its own logger.
	 */
	public MutablePropertySources() {
		this.logger = LogFactory.getLog(getClass());
	}

	/**
	 * Create a new {@code MutablePropertySources} from the given propertySources
	 * object, preserving the original order of contained {@code PropertySource} objects.
	 */
	public MutablePropertySources(PropertySources propertySources) {
		this();
		for (PropertySource<?> propertySource : propertySources) {
			addLast(propertySource);
		}
	}

	/**
	 * Create a new {@link MutablePropertySources} object and inherit the given logger,
	 * usually from an enclosing {@link Environment}.
	 */
	MutablePropertySources(Log logger) {
		this.logger = logger;
	}


	/**
	 * Whether a property source with the given name is registered.
	 * Matching relies on {@link PropertySource#named}, which compares by name only.
	 */
	@Override
	public boolean contains(String name) {
		PropertySource<?> comparisonSource = PropertySource.named(name);
		return this.propertySourceList.contains(comparisonSource);
	}

	/**
	 * Return the property source with the given name, or {@code null} if absent.
	 */
	@Override
	public PropertySource<?> get(String name) {
		int foundAt = this.propertySourceList.indexOf(PropertySource.named(name));
		if (foundAt == -1) {
			return null;
		}
		return this.propertySourceList.get(foundAt);
	}

	/**
	 * Iterate the property sources in search-precedence order (highest first).
	 */
	@Override
	public Iterator<PropertySource<?>> iterator() {
		return this.propertySourceList.iterator();
	}

	/**
	 * Add the given property source object with highest precedence.
	 */
	public void addFirst(PropertySource<?> propertySource) {
		if (logger.isDebugEnabled()) {
			String message = String.format("Adding [%s] PropertySource with highest search precedence",
					propertySource.getName());
			logger.debug(message);
		}
		// A source is identified by name: re-adding replaces any prior registration.
		removeIfPresent(propertySource);
		this.propertySourceList.add(0, propertySource);
	}

	/**
	 * Add the given property source object with lowest precedence.
	 */
	public void addLast(PropertySource<?> propertySource) {
		if (logger.isDebugEnabled()) {
			String message = String.format("Adding [%s] PropertySource with lowest search precedence",
					propertySource.getName());
			logger.debug(message);
		}
		removeIfPresent(propertySource);
		this.propertySourceList.add(propertySource);
	}

	/**
	 * Add the given property source object with precedence immediately higher
	 * than the named relative property source.
	 */
	public void addBefore(String relativePropertySourceName, PropertySource<?> propertySource) {
		if (logger.isDebugEnabled()) {
			String message = String.format("Adding [%s] PropertySource with search precedence immediately higher than [%s]",
					propertySource.getName(), relativePropertySourceName);
			logger.debug(message);
		}
		assertLegalRelativeAddition(relativePropertySourceName, propertySource);
		// Remove first so the relative index is computed against the final list state.
		removeIfPresent(propertySource);
		int anchorIndex = assertPresentAndGetIndex(relativePropertySourceName);
		addAtIndex(anchorIndex, propertySource);
	}

	/**
	 * Add the given property source object with precedence immediately lower
	 * than the named relative property source.
	 */
	public void addAfter(String relativePropertySourceName, PropertySource<?> propertySource) {
		if (logger.isDebugEnabled()) {
			String message = String.format("Adding [%s] PropertySource with search precedence immediately lower than [%s]",
					propertySource.getName(), relativePropertySourceName);
			logger.debug(message);
		}
		assertLegalRelativeAddition(relativePropertySourceName, propertySource);
		removeIfPresent(propertySource);
		int anchorIndex = assertPresentAndGetIndex(relativePropertySourceName);
		addAtIndex(anchorIndex + 1, propertySource);
	}

	/**
	 * Return the precedence of the given property source, {@code -1} if not found.
	 */
	public int precedenceOf(PropertySource<?> propertySource) {
		return this.propertySourceList.indexOf(propertySource);
	}

	/**
	 * Remove and return the property source with the given name, {@code null} if not found.
	 * @param name the name of the property source to find and remove
	 */
	public PropertySource<?> remove(String name) {
		if (logger.isDebugEnabled()) {
			logger.debug(String.format("Removing [%s] PropertySource", name));
		}
		int foundAt = this.propertySourceList.indexOf(PropertySource.named(name));
		return (foundAt == -1 ? null : this.propertySourceList.remove(foundAt));
	}

	/**
	 * Replace the property source with the given name with the given property source object.
	 * @param name the name of the property source to find and replace
	 * @param propertySource the replacement property source
	 * @throws IllegalArgumentException if no property source with the given name is present
	 * @see #contains
	 */
	public void replace(String name, PropertySource<?> propertySource) {
		if (logger.isDebugEnabled()) {
			logger.debug(String.format("Replacing [%s] PropertySource with [%s]",
					name, propertySource.getName()));
		}
		int targetIndex = assertPresentAndGetIndex(name);
		this.propertySourceList.set(targetIndex, propertySource);
	}

	/**
	 * Return the number of {@link PropertySource} objects contained.
	 */
	public int size() {
		return this.propertySourceList.size();
	}

	@Override
	public String toString() {
		String[] names = new String[this.size()];
		int i = 0;
		while (i < size()) {
			names[i] = this.propertySourceList.get(i).getName();
			i++;
		}
		return String.format("[%s]", StringUtils.arrayToCommaDelimitedString(names));
	}

	/**
	 * Ensure that the given property source is not being added relative to itself.
	 */
	protected void assertLegalRelativeAddition(String relativePropertySourceName, PropertySource<?> propertySource) {
		String newPropertySourceName = propertySource.getName();
		if (!relativePropertySourceName.equals(newPropertySourceName)) {
			return;
		}
		throw new IllegalArgumentException(
				String.format("PropertySource named [%s] cannot be added relative to itself", newPropertySourceName));
	}

	/**
	 * Remove the given property source if it is present.
	 */
	protected void removeIfPresent(PropertySource<?> propertySource) {
		this.propertySourceList.remove(propertySource);
	}

	/**
	 * Add the given property source at a particular index in the list.
	 */
	private void addAtIndex(int index, PropertySource<?> propertySource) {
		removeIfPresent(propertySource);
		this.propertySourceList.add(index, propertySource);
	}

	/**
	 * Assert that the named property source is present and return its index.
	 * @param name the {@linkplain PropertySource#getName() name of the property source}
	 * to find
	 * @throws IllegalArgumentException if the named property source is not present
	 */
	private int assertPresentAndGetIndex(String name) {
		int index = this.propertySourceList.indexOf(PropertySource.named(name));
		if (index >= 0) {
			return index;
		}
		throw new IllegalArgumentException(String.format("PropertySource named [%s] does not exist", name));
	}

}
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2013 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.sse.db;

import java.sql.Clob;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Set;

import org.apache.commons.collections.map.LRUMap;
import org.apache.log4j.Logger;
import org.hsqldb.jdbc.JDBCClob;
import org.parosproxy.paros.db.DatabaseException;
import org.parosproxy.paros.db.DbUtils;
import org.parosproxy.paros.db.paros.ParosAbstractTable;
import org.zaproxy.zap.extension.sse.ServerSentEvent;

/** Manages writing and reading Server-Sent Event streams and events to the database.
 */
public class TableEventStream extends ParosAbstractTable {

    private static final Logger logger = Logger.getLogger(TableEventStream.class);

    // IDs of streams known to exist in the DB; used to decide insert vs. update.
    private Set<Integer> streamIds;

    // Small cache of recently requested streams (capacity 20, see reconnect()).
    private LRUMap streamCache;

    // Prepared statements, (re)created on every reconnect().
    private PreparedStatement psInsertEvent;
    private PreparedStatement psSelectStreams;
    private PreparedStatement psInsertStream;
    private PreparedStatement psUpdateStream;
    private PreparedStatement psUpdateHistoryFk;
    private PreparedStatement psDeleteStream;
    private PreparedStatement psDeleteEventsByStreamId;
    private PreparedStatement psSelectEvent;
    private PreparedStatement psSelectMaxStreamId;

    // Hold rows arriving while the connection is closed, to be flushed later.
    private Queue<ServerSentEvent> eventBuffer = new LinkedList<>();
    private Queue<ServerSentEventStream> streamBuffer = new LinkedList<>();

    /** Create tables if not already available */
    @Override
    protected void reconnect(Connection conn) throws DatabaseException {
        try {
            if (!DbUtils.hasTable(conn, "EVENT_STREAM")) {
                // need to create the tables
                DbUtils.execute(
                        conn,
                        "CREATE CACHED TABLE event_stream ("
                                + "stream_id BIGINT PRIMARY KEY,"
                                + "host VARCHAR(255) NOT NULL,"
                                + "port INTEGER NOT NULL,"
                                + "url VARCHAR(255) NOT NULL,"
                                + "start_timestamp TIMESTAMP NOT NULL,"
                                + "end_timestamp TIMESTAMP NULL,"
                                + "history_id INTEGER NULL,"
                                + "FOREIGN KEY (history_id) REFERENCES HISTORY(HISTORYID) ON DELETE SET NULL ON UPDATE SET NULL"
                                + ")");
                DbUtils.execute(
                        conn,
                        "CREATE CACHED TABLE event_stream_event ("
                                + "event_id BIGINT NOT NULL,"
                                + "stream_id BIGINT NOT NULL,"
                                + "timestamp TIMESTAMP NOT NULL,"
                                + "last_event_id VARCHAR(255) NOT NULL,"
                                + "data CLOB(16M) NOT NULL,"
                                + "event_type VARCHAR(255) NOT NULL,"
                                + "reconnection_time BIGINT NULL,"
                                + "raw_event CLOB(16M) NOT NULL,"
                                + "PRIMARY KEY (event_id, stream_id),"
                                + "FOREIGN KEY (stream_id) REFERENCES event_stream(stream_id)"
                                + ")");
                // Freshly created tables: no existing stream IDs to load.
                streamIds = new HashSet<>();
            } else {
                // Marker: existing tables, stream IDs loaded below after statements are prepared.
                streamIds = null;
            }
            streamCache = new LRUMap(20);

            // STREAMS
            psSelectMaxStreamId = conn.prepareStatement(
                    "SELECT MAX(s.stream_id) as stream_id " + "FROM event_stream AS s");
            psSelectStreams = conn.prepareStatement(
                    "SELECT s.* " + "FROM event_stream AS s " + "ORDER BY s.stream_id");
            // id goes last to be consistent with update query
            psInsertStream = conn.prepareStatement(
                    "INSERT INTO "
                            + "event_stream (host, port, url, start_timestamp, end_timestamp, history_id, stream_id) "
                            + "VALUES (?,?,?,?,?,?,?)");
            psUpdateStream = conn.prepareStatement(
                    "UPDATE event_stream SET "
                            + "host = ?, port = ?, url = ?, start_timestamp = ?, end_timestamp = ?, history_id = ? "
                            + "WHERE stream_id = ?");
            psUpdateHistoryFk = conn.prepareStatement(
                    "UPDATE event_stream SET " + "history_id = ? " + "WHERE stream_id = ?");
            psDeleteStream = conn.prepareStatement("DELETE FROM event_stream " + "WHERE stream_id = ?");

            // EVENTS
            psSelectEvent = conn.prepareStatement(
                    "SELECT e.* "
                            + "FROM event_stream_event AS e "
                            + "WHERE e.event_id = ? AND e.stream_id = ?");
            psInsertEvent = conn.prepareStatement(
                    "INSERT INTO "
                            + "event_stream_event (event_id, stream_id, timestamp, last_event_id, data, event_type, reconnection_time, raw_event) "
                            + "VALUES (?,?,?,?,?,?,?,?)");
            psDeleteEventsByStreamId = conn.prepareStatement(
                    "DELETE FROM event_stream_event " + "WHERE stream_id = ?");

            if (streamIds == null) {
                // Tables pre-existed: load the known stream IDs from the DB.
                streamIds = new HashSet<>();
                PreparedStatement psSelectStreamIds = conn.prepareStatement(
                        "SELECT s.stream_id " + "FROM event_stream AS s " + "ORDER BY s.stream_id");
                try {
                    psSelectStreamIds.execute();
                    ResultSet rs = psSelectStreamIds.getResultSet();
                    while (rs.next()) {
                        streamIds.add(rs.getInt(1));
                    }
                } finally {
                    try {
                        psSelectStreamIds.close();
                    } catch (SQLException e) {
                        // Closing failure is non-fatal; log at debug only.
                        if (logger.isDebugEnabled()) {
                            logger.debug(e.getMessage(), e);
                        }
                    }
                }
            }
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    /**
     * Prepares a {@link PreparedStatement} instance on the fly.
     *
     * @param criteria
     * @return number of events that fulfill given template
     * @throws DatabaseException
     */
    public synchronized int getEventCount(ServerSentEvent criteria) throws DatabaseException {
        return getEventCount(criteria, null);
    }

    /**
     * Prepares a {@link PreparedStatement} instance on the fly.
     *
     * @param criteria
     * @param inScopeStreamIds restricts the count to these stream IDs; {@code null} = no restriction
     * @return number of events that fulfill given template
     * @throws DatabaseException
     */
    public synchronized int getEventCount(ServerSentEvent criteria, List<Integer> inScopeStreamIds)
            throws DatabaseException {
        try {
            // "<where>" is a placeholder substituted by buildCriteriaStatementHelper (defined outside this chunk).
            String query = "SELECT COUNT(e.stream_id) FROM event_stream_event AS e " + "<where> ";
            PreparedStatement stmt = buildEventCriteriaStatement(query, criteria, inScopeStreamIds);
            try {
                return executeAndGetSingleIntValue(stmt);
            } finally {
                stmt.close();
            }
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    // Executes the statement and returns the first column of the first row, or 0 on empty result.
    private int executeAndGetSingleIntValue(PreparedStatement stmt) throws SQLException {
        stmt.execute();
        ResultSet rs = stmt.getResultSet();
        try {
            if (rs.next()) {
                return rs.getInt(1);
            }
            return 0;
        } finally {
            rs.close();
        }
    }

    // Returns the position of the criteria event among matching events ordered by event_id
    // (count of matching events with a smaller event_id).
    public synchronized int getIndexOf(ServerSentEvent criteria, List<Integer> inScopeStreamIds)
            throws DatabaseException {
        try {
            String query = "SELECT COUNT(e.event_id) "
                    + "FROM event_stream_event AS e "
                    + "<where> AND e.event_id < ?";
            PreparedStatement stmt = buildEventCriteriaStatement(query, criteria, inScopeStreamIds);
            // The trailing "?" is the last parameter after those bound by the criteria builder.
            int paramsCount = stmt.getParameterMetaData().getParameterCount();
            stmt.setInt(paramsCount, criteria.getId());
            try {
                return executeAndGetSingleIntValue(stmt);
            } finally {
                stmt.close();
            }
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    // Loads a single event (full payload) by its composite (eventId, streamId) key.
    // Throws DatabaseException if the event does not exist.
    public synchronized ServerSentEvent getEvent(int eventId, int streamId) throws DatabaseException {
        try {
            psSelectEvent.setInt(1, eventId);
            psSelectEvent.setInt(2, streamId);
            psSelectEvent.execute();
            List<ServerSentEvent> events = buildEvents(psSelectEvent.getResultSet(), false);
            if (events.size() != 1) {
                throw new DatabaseException("Event not found!");
            }
            return events.get(0);
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    /**
     * Retrieves list of {@link ServerSentEvent}, but loads only parts of the payload.
     *
     * @param criteria
     * @param inScopeStreamIds
     * @param offset
     * @param limit
     * @param payloadPreviewLength maximum characters of data/raw_event to load; -1 loads everything
     * @return Events that fulfill given template.
     * @throws DatabaseException
     */
    public synchronized List<ServerSentEvent> getEvents(
            ServerSentEvent criteria,
            List<Integer> inScopeStreamIds,
            int offset,
            int limit,
            int payloadPreviewLength)
            throws DatabaseException {
        try {
            String query = "SELECT e.event_id, e.stream_id, e.timestamp, e.last_event_id, e.event_type, e.data, e.reconnection_time, e.raw_event "
                    + "FROM event_stream_event AS e "
                    + "<where> "
                    + "ORDER BY e.timestamp, e.stream_id, e.event_id "
                    + "LIMIT ? "
                    + "OFFSET ?";
            PreparedStatement stmt;
            try {
                stmt = buildEventCriteriaStatement(query, criteria, inScopeStreamIds);
            } catch (DatabaseException e) {
                // A closed connection during shutdown is treated as "no events" rather than an error.
                if (getConnection().isClosed()) {
                    return new ArrayList<>(0);
                }
                throw e;
            }
            try {
                // LIMIT and OFFSET are the last two parameters after the criteria-bound ones.
                int paramsCount = stmt.getParameterMetaData().getParameterCount();
                stmt.setInt(paramsCount - 1, limit);
                stmt.setInt(paramsCount, offset);
                stmt.execute();
                return buildEvents(stmt.getResultSet(), true, payloadPreviewLength);
            } finally {
                stmt.close();
            }
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    // Convenience overload: load full payloads (no preview truncation).
    private List<ServerSentEvent> buildEvents(ResultSet rs, boolean interpretLiteralBytes)
            throws SQLException {
        return buildEvents(rs, interpretLiteralBytes, -1);
    }

    /**
     * Materializes events from a result set; closes the result set when done.
     *
     * @param rs
     * @param interpretLiteralBytes currently unused in this method body — TODO confirm intended use
     * @param previewLength max characters of data/raw_event to load; -1 loads all
     * @return
     * @throws SQLException
     */
    private List<ServerSentEvent> buildEvents(
            ResultSet rs, boolean interpretLiteralBytes, int previewLength) throws SQLException {
        List<ServerSentEvent> events = new ArrayList<>();
        try {
            while (rs.next()) {
                ServerSentEvent event;
                int streamId = rs.getInt("stream_id");
                // ServerSentEventStream stream = getStream(streamId);
                event = new ServerSentEvent(); // TODO should I set stream?
                event.setId(rs.getInt("event_id"));
                event.setEventType(rs.getString("event_type"));
                event.setLastEventId(rs.getString("last_event_id"));
                event.setStreamId(streamId);
                event.setTime(rs.getTimestamp("timestamp"));

                if (previewLength == -1) {
                    // load all characters
                    event.setData(rs.getString("data"));
                } else {
                    Clob clob = rs.getClob("data");
                    int length = Math.min(previewLength, (int) clob.length());
                    event.setData(clob.getSubString(1, length));
                    clob.free();
                }

                // Raw event length is always recorded, even when only a preview is loaded.
                Clob clob = rs.getClob("raw_event");
                event.setRawEventLength(clob.length());
                if (previewLength == -1) {
                    // load all characters
                    event.setRawEvent(rs.getString("raw_event"));
                } else {
                    int length = Math.min(previewLength, (int) clob.length());
                    event.setRawEvent(clob.getSubString(1, length));
                }
                clob.free();

                events.add(event);
            }
        } finally {
            rs.close();
        }
        return events;
    }

    // Returns the stream with the given ID, consulting the LRU cache first.
    // Throws DatabaseException when the stream is unknown.
    public ServerSentEventStream getStream(int streamId) throws DatabaseException {
        if (!streamCache.containsKey(streamId)) {
            ServerSentEventStream criteria = new ServerSentEventStream();
            criteria.setId(streamId);
            List<ServerSentEventStream> streams = getStreams(criteria);
            if (streams.size() == 1) {
                streamCache.put(streamId, streams.get(0));
            } else {
                throw new DatabaseException("Stream '" + streamId + "' not found!");
            }
        }
        return (ServerSentEventStream) streamCache.get(streamId);
    }

    // Builds a WHERE clause (stream-id filter + in-scope IN list) and binds its parameters.
    // NOTE(review): conditions reference alias "s" while the count/select queries alias the
    // event table as "e" — presumably resolved by buildCriteriaStatementHelper; verify.
    private PreparedStatement buildEventCriteriaStatement(
            String query, ServerSentEvent criteria, List<Integer> inScopeStreamIds)
            throws DatabaseException, SQLException {
        List<String> where = new ArrayList<>();
        List<Object> params = new ArrayList<>();
        Integer streamId = criteria.getStreamId();
        if (streamId != null) {
            where.add("s.stream_id = ?");
            params.add(streamId);
        }
        if (inScopeStreamIds != null) {
            StringBuilder whereExpr = new StringBuilder("s.stream_id IN (");
            int inScopeStreamCount = inScopeStreamIds.size();
            if (inScopeStreamCount > 0) {
                for (int i = 0; i < inScopeStreamCount; i++) {
                    params.add(inScopeStreamIds.get(i));
                    whereExpr.append("?");
                    if ((i + 1) < inScopeStreamCount) {
                        whereExpr.append(",");
                    }
                }
            } else {
                // Empty scope list: "IN (null)" matches nothing.
                whereExpr.append("null");
            }
            whereExpr.append(")");
            where.add(whereExpr.toString());
        }
        return buildCriteriaStatementHelper(query, where, params);
    }

    // Composite (streamId, eventId) key for an event row.
    public EventStreamPrimaryKey getEventPrimaryKey(ServerSentEvent event) {
        return new EventStreamPrimaryKey(event.getStreamId(), event.getId());
    }

    // Loads all streams, ordered by stream_id.
    public List<ServerSentEventStream> getStreamItems() throws DatabaseException {
        try {
            psSelectStreams.execute();
            ResultSet rs = psSelectStreams.getResultSet();
            return buildStreams(rs);
        } catch (SQLException e) {
            throw new DatabaseException(e);
        }
    }

    // Materializes streams from a result set; closes the result set when done.
    private List<ServerSentEventStream> buildStreams(ResultSet rs) throws SQLException {
        List<ServerSentEventStream> streams = new ArrayList<>();
        try {
            while (rs.next()) {
                ServerSentEventStream stream = new ServerSentEventStream();
                stream.setId(rs.getInt("stream_id"));
                stream.setHost(rs.getString("host"));
                stream.setPort(rs.getInt("port"));
                stream.setUrl(rs.getString("url"));
                stream.setStartTimestamp(rs.getTimestamp("start_timestamp").getTime());
                // NOTE(review): end_timestamp is read with getTime() (time-of-day only) while the
                // start uses getTimestamp() — this likely drops the date component; verify intent.
                Time endTs = rs.getTime("end_timestamp");
                stream.setEndTimestamp((endTs != null) ? endTs.getTime() : null);
                // NOTE(review): history_id is nullable in the schema but getInt() maps NULL to 0 — confirm acceptable.
                stream.setHistoryId(rs.getInt("history_id"));
                streams.add(stream);
            }
        } finally {
            rs.close();
        }
        return streams;
    }

    // Inserts the stream if its ID is unknown, otherwise updates it; when the connection
    // is closed the stream is buffered for a later flush.
    public void insertOrUpdateStream(ServerSentEventStream stream) throws DatabaseException {
        synchronized (this) {
            try {
                if (getConnection().isClosed()) {
                    // temporarily buffer streams and insert/update later
                    streamBuffer.offer(stream);
                    return;
                }
                do {
                    PreparedStatement stmt;
                    boolean addIdOnSuccess = false;
                    // first, find out if already inserted
                    if (streamIds.contains(stream.getId())) {
                        // proceed with update
                        stmt = psUpdateStream;
                    } else {
                        // proceed with insert
                        stmt = psInsertStream;
                        addIdOnSuccess = true;
                        if (logger.isDebugEnabled()) {
                            logger.debug("insert stream: " + stream.toString());
                        }
                    }

                    Long startTs = stream.getStartTimestamp();
                    Long endTs = stream.getEndTimestamp();
                    stmt.setString(1, stream.getHost());
                    stmt.setInt(2, stream.getPort());
                    stmt.setString(3, stream.getUrl());
                    stmt.setTimestamp(4, (startTs != null) ? new Timestamp(startTs) : null);
                    stmt.setTimestamp(5, (endTs != null) ? // NOTE: source chunk is truncated here; the remainder of this method lies outside this view
new Timestamp(endTs) : null); stmt.setNull(6, Types.INTEGER); stmt.setInt(7, stream.getId()); stmt.execute(); if (addIdOnSuccess) { streamIds.add(stream.getId()); } if (stream.getHistoryId() != null) { psUpdateHistoryFk.setInt(1, stream.getHistoryId()); psUpdateHistoryFk.setInt(2, stream.getId()); try { psUpdateHistoryFk.execute(); } catch (SQLException e) { // safely ignore this exception // on shutdown, the history table is cleaned before // event streams are closed and updated if (logger.isDebugEnabled()) { logger.debug(e.getMessage(), e); } } } stream = streamBuffer.poll(); } while (stream != null); } catch (SQLException e) { throw new DatabaseException(e); } } } public void insertEvent(ServerSentEvent event) throws DatabaseException { // synchronize on whole object to avoid race conditions with insertOrUpdateStreams() synchronized (this) { try { if (getConnection().isClosed()) { // temporarily buffer events and write them the next time eventBuffer.offer(event); return; } do { while (!streamIds.contains(event.getStreamId())) { // maybe stream is buffered if (streamBuffer.size() > 0) { insertOrUpdateStream(streamBuffer.poll()); continue; } throw new DatabaseException("stream not inserted: " + event.getStreamId()); } if (logger.isDebugEnabled()) { logger.debug("insert event: " + event.toString()); } psInsertEvent.setInt(1, event.getId()); psInsertEvent.setInt(2, event.getStreamId()); psInsertEvent.setTimestamp(3, new Timestamp(event.getTimestamp())); psInsertEvent.setString(4, event.getLastEventId()); psInsertEvent.setClob(5, new JDBCClob(event.getData())); psInsertEvent.setString(6, event.getEventType()); Integer time; if ((time = event.getReconnectionTime()) == null) { psInsertEvent.setNull(7, java.sql.Types.INTEGER); } else { psInsertEvent.setInt(7, time); } psInsertEvent.setClob(8, new JDBCClob(event.getRawEvent())); psInsertEvent.execute(); event = eventBuffer.poll(); } while (event != null); } catch (SQLException e) { throw new DatabaseException(e); } } } 
public List<ServerSentEventStream> getStreams(ServerSentEventStream criteria) throws DatabaseException { try { String query = "SELECT s.* " + "FROM event_stream AS s " + "<where> " + "ORDER BY s.start_timestamp, s.stream_id"; PreparedStatement stmt; try { stmt = buildEventCriteriaStatement(query, criteria); } catch (DatabaseException e) { if (getConnection().isClosed()) { return new ArrayList<>(0); } throw e; } stmt.execute(); return buildStreams(stmt.getResultSet()); } catch (SQLException e) { throw new DatabaseException(e); } } private PreparedStatement buildEventCriteriaStatement( String query, ServerSentEventStream criteria) throws SQLException, DatabaseException { List<String> where = new ArrayList<>(); List<Object> params = new ArrayList<>(); Integer id = criteria.getId(); if (id != null) { where.add("s.stream_id = ?"); params.add(id); } return buildCriteriaStatementHelper(query, where, params); } private PreparedStatement buildCriteriaStatementHelper( String query, List<String> where, List<Object> params) throws SQLException, DatabaseException { int conditionsCount = where.size(); if (conditionsCount > 0) { StringBuilder whereExpr = new StringBuilder(); int i = 0; for (String condition : where) { whereExpr.append(condition); i++; if (i < conditionsCount) { // one more will be appended whereExpr.append(" AND "); } } query = query.replace("<where>", "WHERE " + whereExpr.toString()); } else { query = query.replace("<where> AND", "WHERE "); query = query.replace("<where> ", ""); } PreparedStatement stmt = getConnection().prepareStatement(query); try { int i = 1; for (Object param : params) { stmt.setObject(i++, param); } } catch (SQLException e) { stmt.close(); throw e; } return stmt; } /** * Deletes all entries from given streamId from database. 
* * @param streamId * @throws DatabaseException */ public void purgeStream(Integer streamId) throws DatabaseException { synchronized (this) { try { if (streamIds.contains(streamId)) { psDeleteEventsByStreamId.setInt(1, streamId); psDeleteEventsByStreamId.execute(); psDeleteStream.setInt(1, streamId); psDeleteStream.execute(); streamIds.remove(streamId); } } catch (SQLException e) { throw new DatabaseException(e); } } } /** * @return current maximum value of the stream_id column * @throws DatabaseException */ public int getMaxStreamId() throws DatabaseException { synchronized (this) { try { return executeAndGetSingleIntValue(psSelectMaxStreamId); } catch (SQLException e) { throw new DatabaseException(e); } } } }
/* * Copyright 2014 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.teiid.webui.client.widgets.table; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.enterprise.event.Event; import javax.inject.Inject; import org.teiid.webui.client.dialogs.UiEvent; import org.teiid.webui.client.dialogs.UiEventType; import org.teiid.webui.client.widgets.CheckableNameTypeRow; import org.teiid.webui.share.Constants; import com.google.gwt.cell.client.Cell.Context; import com.google.gwt.cell.client.CheckboxCell; import com.google.gwt.cell.client.FieldUpdater; import com.google.gwt.dom.client.Element; import com.google.gwt.dom.client.InputElement; import com.google.gwt.dom.client.NativeEvent; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.user.cellview.client.Column; import com.google.gwt.user.cellview.client.Header; import com.google.gwt.user.cellview.client.TextColumn; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.Label; import com.google.gwt.user.client.ui.VerticalPanel; import com.google.gwt.user.client.ui.Widget; import com.google.gwt.view.client.SelectionModel; /** * Composite for display of Table names. Contains checkboxes next to the rows which fire event * when the checkbox state is changed. 
*/ public class TableNamesTable extends Composite { @Inject Event<UiEvent> uiEvent; private static String COLUMN_HEADER_NAME = "Tables"; private static int TABLE_HEIGHT_PX = 200; private static int TABLE_WIDTH_PX = 280; private static int TABLE_VISIBLE_ROWS = 7; protected VerticalPanel panel = new VerticalPanel(); protected Label label = new Label(); private SimpleTable<CheckableNameTypeRow> table; private CheckboxHeader cbHeader; private String owner; private boolean showHeader = true; private boolean disableUncheckedRows = false; private DisableableCheckboxCell checkboxCell = new DisableableCheckboxCell(this.disableUncheckedRows,true,false); private Widget tablePanel; public TableNamesTable( ) { initWidget( panel ); tablePanel = createTablePanel(); panel.add(tablePanel); } public void setOwner(String owner) { this.owner = owner; } public void setShowHeader(boolean showHeader) { if(showHeader!=this.showHeader) { this.showHeader = showHeader; // Remove current table panel.remove(tablePanel); // Re-init tablePanel = createTablePanel(); panel.add(tablePanel); } } public void setDisableUncheckedRows(boolean disableUnchecked) { this.checkboxCell.setDisableIfUnchecked(disableUnchecked); table.redraw(); } public void setCheckedState(String tableName, boolean isChecked) { List<CheckableNameTypeRow> rows = table.getRowData(); for(CheckableNameTypeRow row : rows) { if(row.getName().equalsIgnoreCase(tableName)) { row.setChecked(isChecked); break; } } } /** * Create the panel * @return the panel widget */ protected Widget createTablePanel() { table = new SimpleTable<CheckableNameTypeRow>(TABLE_HEIGHT_PX,TABLE_WIDTH_PX,TABLE_VISIBLE_ROWS); // Add Checkbox column Column<CheckableNameTypeRow, Boolean> checkboxColumn = new Column<CheckableNameTypeRow, Boolean>(checkboxCell) { @Override public Boolean getValue(CheckableNameTypeRow object) { if(object == null) return false; return object.isChecked(); } }; checkboxColumn.setFieldUpdater(new FieldUpdater<CheckableNameTypeRow, 
Boolean>() { public void update(int index, CheckableNameTypeRow object, Boolean value) { object.setChecked(value); boolean allRowsSame = true; List<CheckableNameTypeRow> tableRows = table.getRowData(); boolean firstState = false; for(int i=0; i<tableRows.size(); i++) { CheckableNameTypeRow row = tableRows.get(i); if(i==0) { firstState = row.isChecked(); } else { boolean thisState = row.isChecked(); if(thisState!=firstState) { allRowsSame = false; break; } } } if(allRowsSame) { cbHeader.setValue(firstState); } else { cbHeader.setValue(false); } table.redrawHeaders(); fireCheckboxEvent(object.getName(),value); } }); // Checkbox Header cbHeader = createCBHeader(false); if(showHeader) { table.addColumn(checkboxColumn, cbHeader); } else { table.addColumn(checkboxColumn); } table.setColumnWidth(checkboxColumn, 40, Unit.PX); // -------------- // Name Column // -------------- TextColumn<CheckableNameTypeRow> nameColumn = new TextColumn<CheckableNameTypeRow>() { public String getValue( CheckableNameTypeRow row ) { return row.getName(); } }; if(showHeader) { table.addColumn( nameColumn, COLUMN_HEADER_NAME ); } else { table.addColumn( nameColumn ); } table.setColumnWidth(nameColumn, 200, Unit.PX); VerticalPanel verticalPanel = new VerticalPanel(); verticalPanel.add(table); return verticalPanel; } /* * Fires Ui event when a table checkbox is checked or unchecked */ private void fireCheckboxEvent(String tableName,boolean isChecked) { UiEvent theEvent = null; if(isChecked) { theEvent = new UiEvent(UiEventType.TABLE_NAME_TABLE_CHECKBOX_CHECKED); } else { theEvent = new UiEvent(UiEventType.TABLE_NAME_TABLE_CHECKBOX_UNCHECKED); } theEvent.setEventSource(this.owner); theEvent.setTableName(tableName); uiEvent.fire(theEvent); } private CheckboxHeader createCBHeader(boolean isChecked) { CheckboxHeader cbHeader = new CheckboxHeader(new CheckboxCell(),false) { @Override protected void headerUpdated(boolean checkState){ List<CheckableNameTypeRow> tableRows = table.getRowData(); 
for(CheckableNameTypeRow aRow : tableRows) { aRow.setChecked(checkState); } UiEvent theEvent = new UiEvent(UiEventType.TABLE_NAME_TABLE_CHECKBOX_CHECKED); theEvent.setEventSource(owner); uiEvent.fire(theEvent); table.redraw(); } }; return cbHeader; } public void clear() { setData(Collections.<CheckableNameTypeRow>emptyList()); } public String getSelectedRowString() { StringBuilder sb = new StringBuilder(); List<CheckableNameTypeRow> rows = table.getRowData(); for(CheckableNameTypeRow row : rows) { if(row.isChecked()) { if(!sb.toString().isEmpty()) { sb.append(Constants.COMMA); } sb.append(row.getName()); } } return sb.toString(); } public List<String> getSelectedTableNames() { List<String> colNames = new ArrayList<String>(); List<CheckableNameTypeRow> rows = table.getRowData(); for(CheckableNameTypeRow row : rows) { if(row.isChecked() && row.getName()!=null) { colNames.add(row.getName()); } } return colNames; } public List<String> getSelectedTableTypes() { List<String> colTypes = new ArrayList<String>(); List<CheckableNameTypeRow> rows = table.getRowData(); for(CheckableNameTypeRow row : rows) { if(row.isChecked() && row.getType()!=null) { colTypes.add(row.getType()); } } return colTypes; } public void setData(List<CheckableNameTypeRow> rows) { // Resets table rows table.setRowData(rows); // Header checkbox initially unchecked cbHeader.setValue(false); table.redrawHeaders(); } public List<CheckableNameTypeRow> getData() { return table.getRowData(); } public void setSelectionModel( final SelectionModel<CheckableNameTypeRow> selectionModel ) { table.setSelectionModel( selectionModel ); } /** * Checkbox Header */ private class CheckboxHeader extends Header<Boolean> { Boolean checkedState = false; public CheckboxHeader(CheckboxCell cell, boolean isChecked) { super(cell); checkedState = isChecked; } public void setValue(boolean isChecked) { checkedState = isChecked; } @Override public Boolean getValue() { return checkedState; } @Override public void 
onBrowserEvent(Context context, Element elem, NativeEvent event) { InputElement input = elem.getFirstChild().cast(); checkedState = input.isChecked(); headerUpdated(checkedState); } protected void headerUpdated(boolean isChecked) { // override this method in defining class } } }
// Copyright (c) 2003-present, Jodd Team (http://jodd.org) // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
package jodd.json;

import jodd.json.impl.ObjectJsonSerializer;
import jodd.json.mock.Address;
import jodd.json.mock.Employee;
import jodd.json.mock.Friend;
import jodd.json.mock.Hill;
import jodd.json.mock.Mountain;
import jodd.json.mock.Network;
import jodd.json.mock.Person;
import jodd.json.mock.Phone;
import jodd.json.mock.Spiderman;
import jodd.json.mock.Surfer;
import jodd.json.mock.TestClass2;
import jodd.json.mock.TestClass3;
import jodd.json.mock.Zipcode;
import jodd.json.model.ListContainer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@code JsonSerializer}: include/exclude rules, wildcards, deep serialization,
 * class-metadata emission, and custom serializers. Custom assertion helpers such as
 * {@code assertAttribute}/{@code assertStringValue} are defined elsewhere in this class.
 */
public class JSONSerializationTest {

	private Person jodder;
	private Person modesty;
	private Map colors;
	private List people;
	private Network network;
	private Zipcode pedroZip;
	private Employee dilbert;

	@Before
	@SuppressWarnings({"unchecked"})
	public void setUp() {
		// DataCreator builds the shared mock object graph used by most tests.
		DataCreator dataCreator = new DataCreator();

		pedroZip = new Zipcode("848485");
		Person pedro = dataCreator.createPedro();
		jodder = dataCreator.createJodder();
		modesty = dataCreator.createModesty();
		colors = dataCreator.createColorMap();

		people = new ArrayList();
		people.add(jodder);
		people.add(modesty);
		people.add(pedro);

		dilbert = dataCreator.createDilbert();

		network = dataCreator.createNetwork("My Network", jodder, modesty);
	}

	@After
	public void tearDown() {
		// Reset the global class-metadata setting so tests don't leak state into each other.
		JoddJson.classMetadataName = null;
	}

	// Verifies default serialization plus progressively refined exclude/include rules.
	@Test
	public void testObject() {
		JoddJson.classMetadataName = "class";

		JsonSerializer serializer = new JsonSerializer();

		String jodderJson = serializer.serialize(jodder);

		assertStringValue(Person.class.getName(), jodderJson);
		assertAttribute("firstname", jodderJson);
		assertStringValue("Igor", jodderJson);
		assertAttribute("lastname", jodderJson);
		assertStringValue("Spasic", jodderJson);
		assertAttribute("work", jodderJson);
		assertAttribute("home", jodderJson);
		assertAttribute("street", jodderJson);
		assertStringValue(Address.class.getName(), jodderJson);
		assertAttribute("zipcode", jodderJson);
		assertStringValue(Zipcode.class.getName(), jodderJson);
		assertAttributeMissing("person", jodderJson);
		assertAttributeMissing("phones", jodderJson);
		assertStringValueMissing(Phone.class.getName(), jodderJson);
		assertAttributeMissing("hobbies", jodderJson);

		JsonSerializer jdrSerializer = new JsonSerializer();
		jdrSerializer.exclude("home", "work");
		String modestyJson = jdrSerializer.serialize(modesty);

		assertStringValue(Person.class.getName(), modestyJson);
		assertAttribute("firstname", modestyJson);
		assertStringValue("Modesty", modestyJson);
		assertAttribute("lastname", modestyJson);
		assertStringValue("Blase", modestyJson);
		assertAttribute("birthdate", modestyJson);
		assertStringValueMissing(Address.class.getName(), modestyJson);
		assertAttributeMissing("work", modestyJson);
		assertAttributeMissing("home", modestyJson);
		assertAttributeMissing("street", modestyJson);
		assertAttributeMissing("city", modestyJson);
		assertAttributeMissing("state", modestyJson);
		assertStringValueMissing(Zipcode.class.getName(), modestyJson);
		assertAttributeMissing("zipcode", modestyJson);
		assertStringValueMissing(Phone.class.getName(), modestyJson);
		assertAttributeMissing("hobbies", modestyJson);
		assertAttributeMissing("person", modestyJson);

		serializer.exclude("home.zipcode", "work.zipcode");
		String json2 = serializer.serialize(jodder);

		assertStringValue(Person.class.getName(), json2);
		assertAttribute("work", json2);
		assertAttribute("home", json2);
		assertAttribute("street", json2);
		assertStringValue(Address.class.getName(), json2);
		assertAttributeMissing("zipcode", json2);
		assertAttributeMissing("phones", json2);
		assertStringValueMissing(Zipcode.class.getName(), json2);
		assertStringValueMissing(Phone.class.getName(), json2);
		assertAttributeMissing("hobbies", json2);
		assertAttributeMissing("type", json2);
		assertStringValueMissing("PAGER", json2);

		serializer.include("hobbies").exclude(true, "phones.areaCode", "phones.exchange", "phones.number");
		String json3 = serializer.serialize(jodder);

		assertStringValue(Person.class.getName(), json3);
		assertAttribute("work", json3);
		assertAttribute("home", json3);
		assertAttribute("street", json3);
		assertStringValue(Address.class.getName(), json3);
		assertAttribute("phones", json3);
		assertAttribute("phoneNumber", json3);
		assertStringValue(Phone.class.getName(), json3);
		assertAttribute("hobbies", json3);
		assertAttributeMissing("zipcode", json3);
		assertAttributeMissing(Zipcode.class.getName(), json3);
		assertAttributeMissing("areaCode", json3);
		assertAttributeMissing("exchange", json3);
		assertAttributeMissing("number", json3);
		assertAttribute("type", json3);
		assertStringValue("PAGER", json3);
		assertTrue(json3.startsWith("{"));
		assertTrue(json3.endsWith("}"));
	}

	// Map serialization, including null keys and null values.
	@Test
	public void testMap() {
		JsonSerializer serializer = new JsonSerializer();
		String colorsJson = serializer.serialize(colors);

		for (Object o : colors.entrySet()) {
			Map.Entry entry = (Map.Entry) o;
			assertAttribute(entry.getKey().toString(), colorsJson);
			assertStringValue(entry.getValue().toString(), colorsJson);
		}
		assertTrue(colorsJson.startsWith("{"));
		assertTrue(colorsJson.endsWith("}"));

		colors.put(null, "#aaaaaa");
		colors.put("orange", null);

		String json = serializer.serialize(colors);

		assertTrue(json.contains("null:"));
		assertStringValue("#aaaaaa", json);
		assertAttribute("orange", json);
		assertTrue(json.contains(":null"));
	}

	// Primitive arrays serialize as bare numbers (no quoting).
	@Test
	public void testArray() {
		int[] array = new int[30];
		for (int i = 0; i < array.length; i++) {
			array[i] = i;
		}
		String json = new JsonSerializer().serialize(array);

		for (int i = 0; i < array.length; i++) {
			assertNumber(i, json);
		}
		assertFalse(json.contains("\""));
		assertFalse(json.contains("\'"));
	}

	// Collections serialize as JSON arrays of values (no keys).
	@Test
	public void testCollection() {
		JsonSerializer serializer = new JsonSerializer();
		String colorsJson = serializer.serialize(colors.values());
		for (Object o : colors.entrySet()) {
			Map.Entry entry = (Map.Entry) o;
			assertAttributeMissing(entry.getKey().toString(), colorsJson);
			assertStringValue(entry.getValue().toString(), colorsJson);
		}
		assertTrue(colorsJson.startsWith("["));
		assertTrue(colorsJson.endsWith("]"));
	}

	// String escaping rules, including the "</script>" HTML-safety escape.
	@Test
	public void testString() {
		assertSerializedTo("Hello", "\"Hello\"");
		assertSerializedTo("Hello\nWorld", "\"Hello\\nWorld\"");
		assertSerializedTo("Hello 'Big Boy'", "\"Hello 'Big Boy'\"");
		assertSerializedTo("Fly \"you fools\"", "\"Fly \\\"you fools\\\"\"");
		assertSerializedTo("</script>", "\"<\\/script>\"");
	}

	@Test
	public void testListOfObjects() {
		JoddJson.classMetadataName = "class";

		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.serialize(people);

		assertStringValue(Person.class.getName(), peopleJson);
		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertStringValue("Modesty", peopleJson);
		assertAttribute("lastname", peopleJson);
		assertStringValue("Spasic", peopleJson);
		assertStringValue(Address.class.getName(), peopleJson);
		assertStringValue("Pedro", peopleJson);
		assertStringValue("Sanchez", peopleJson);

		serializer = new JsonSerializer().exclude("home", "work");
		peopleJson = serializer.serialize(people);

		assertStringValue(Person.class.getName(), peopleJson);
		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertStringValue("Modesty", peopleJson);
		assertAttribute("lastname", peopleJson);
		assertStringValue("Spasic", peopleJson);
		assertStringValueMissing(Address.class.getName(), peopleJson);
	}

	// Dotted include paths pull in nested collection properties.
	@Test
	public void testDeepIncludes() {
		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.include("people.hobbies").serialize(network);

		assertAttribute("name", peopleJson);
		assertStringValue("My Network", peopleJson);
		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertStringValue("Modesty", peopleJson);
		assertAttribute("lastname", peopleJson);
		assertStringValue("Spasic", peopleJson);
		assertAttribute("hobbies", peopleJson);
		assertStringValue("read", peopleJson);
	}

	// Dates serialize as epoch-millis numbers, not as objects.
	@Test
	public void testDates() {
		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.exclude("home", "work").serialize(jodder);

		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertNumber(jodder.getBirthdate().getTime(), peopleJson);
		assertStringValueMissing("java.util.Date", peopleJson);
	}

	// By default (shallow), collection-valued properties are omitted.
	@Test
	public void testSimpleShallowWithListInMap() {
		JsonSerializer serializer = new JsonSerializer();
		Map wrapper = new HashMap();
		wrapper.put("name", "Joe Blow");
		wrapper.put("people", people);

		String peopleJson = serializer.serialize(wrapper);

		assertFalse(peopleJson.contains("["));

		serializer.include("people.*");
		peopleJson = serializer.serialize(wrapper);

		assertTrue(peopleJson.contains("["));
	}

	@Test
	public void testSimpleShallowWithListInObject() {
		JsonSerializer serializer = new JsonSerializer();
		ListContainer wrapper = new ListContainer();
		wrapper.setName("Joe Blow");
		wrapper.setPeople(people);

		String peopleJson = serializer.serialize(wrapper);

		assertFalse(peopleJson.contains("["));
	}

	// Include rules are stored in declaration order.
	@Test
	public void testSetIncludes() {
		JsonSerializer serializer = new JsonSerializer();
		serializer.include("people.hobbies", "phones", "home", "people.resume");

		assertEquals(4, serializer.rules.totalRules());
		assertEquals("[people.hobbies]", serializer.rules.getRule(0).toString());
		assertEquals("[phones]", serializer.rules.getRule(1).toString());
		assertEquals("[home]", serializer.rules.getRule(2).toString());
		assertEquals("[people.resume]", serializer.rules.getRule(3).toString());
	}

	@Test
	public void testDeepSerialization() {
		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.deep(true).serialize(network);

		assertAttribute("name", peopleJson);
		assertStringValue("My Network", peopleJson);
		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertStringValue("Modesty", peopleJson);
		assertAttribute("lastname", peopleJson);
		assertStringValue("Spasic", peopleJson);
		assertAttributeMissing("hobbies", peopleJson);	// annotation explicitly excludes this
		assertStringValueMissing("read", peopleJson);
	}

	// Explicit includes win over annotation-based exclusion in deep mode.
	@Test
	public void testDeepSerializationWithIncludeOverrides() {
		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.include("people.hobbies").deep(true).serialize(network);

		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertAttribute("hobbies", peopleJson);
		assertStringValue("read", peopleJson);
		assertStringValue("run", peopleJson);
		assertStringValue("code", peopleJson);
	}

	@Test
	public void testDeepSerializationWithExcludes() {
		JsonSerializer serializer = new JsonSerializer();
		String peopleJson = serializer.exclude("people.work").deep(true).serialize(network);

		assertAttribute("firstname", peopleJson);
		assertStringValue("Igor", peopleJson);
		assertAttributeMissing("work", peopleJson);
		assertStringValue("173 Hackers Drive", peopleJson);
		assertAttribute("home", peopleJson);
		assertAttribute("phones", peopleJson);
	}

	// Back-references (zipcode -> person) must not cause infinite recursion in deep mode.
	@Test
	public void testDeepSerializationCycles() {
		JsonSerializer serializer = new JsonSerializer();
		String json = serializer.deep(true).serialize(people);

		assertAttribute("zipcode", json);
		assertEquals(2, occurs(pedroZip.getZipcode(), json));
		assertAttributeMissing("person", json);
	}

	// Inherited properties are serialized along with the subclass's own.
	@Test
	public void testSerializeSuperClass() {
		JsonSerializer serializer = new JsonSerializer();
		String json = serializer.serialize(dilbert);

		assertAttribute("company", json);
		assertStringValue("Initech", json);
		assertAttribute("firstname", json);
		assertStringValue("Dilbert", json);
	}

	@Test
	public void testSerializePublicFields() {
		Spiderman spiderman = new Spiderman();
		JsonSerializer serializer = new JsonSerializer();
		String json = serializer.serialize(spiderman);

		assertAttribute("spideySense", json);
		assertAttribute("superpower", json);
		assertStringValue("Creates web", json);
	}

	@Test
	public void testExcludingPublicFields() {
		Spiderman spiderman = new Spiderman();

		String json = new JsonSerializer().exclude("superpower").serialize(spiderman);

		assertAttributeMissing("superpower", json);
		assertAttribute("spideySense", json);
	}

	// "*.class" strips the class-metadata attribute everywhere.
	@Test
	public void testWildcards() {
		JoddJson.classMetadataName = "class";

		JsonSerializer serializer = new JsonSerializer();
		String json = serializer.include("phones").exclude("*.class").serialize(jodder);

		assertAttributeMissing("class", json);
		assertAttribute("phones", json);
		assertAttributeMissing("hobbies", json);
	}

	@Test
	public void testExclude() {
		String json = new JsonSerializer().serialize(jodder);

		assertAttribute("firstname", json);
		assertAttributeMissing("number", json);
		assertAttributeMissing("exchange", json);
		assertAttributeMissing("areaCode", json);

		json = new JsonSerializer().include("phones").serialize(jodder);

		assertAttribute("firstname", json);
		assertAttribute("number", json);
		assertAttribute("exchange", json);
		assertAttribute("areaCode", json);

		json = new JsonSerializer().include("phones").exclude("phones.areaCode").serialize(jodder);

		assertAttribute("firstname", json);
		assertAttribute("number", json);
		assertAttribute("exchange", json);
		assertAttributeMissing("areaCode", json);
	}

	// Excluding everything yields an empty JSON object.
	@Test
	public void testExcludeAll() {
		JsonSerializer serializer = new JsonSerializer();
		String json = serializer.exclude("*").serialize(jodder);

		assertEquals("{}", json);
	}

	// Specific includes override a blanket "*" exclude.
	@Test
	public void testMixedWildcards() {
		JsonSerializer serializer = new JsonSerializer();
		serializer.include("firstname", "lastname").exclude("*");
		String json = serializer.serialize(jodder);

		assertAttribute("firstname", json);
		assertStringValue("Igor", json);
		assertAttribute("lastname", json);
		assertStringValue("Spasic", json);
		assertAttributeMissing("class", json);
		assertAttributeMissing("phones", json);
		assertAttributeMissing("birthdate", json);

		serializer = new JsonSerializer();
		serializer.include("firstname", "lastname", "phones.areaCode", "phones.exchange", "phones.number").exclude("*");
		json = serializer.serialize(jodder);

		assertAttribute("firstname", json);
		assertStringValue("Igor", json);
		assertAttribute("lastname", json);
		assertStringValue("Spasic", json);
		assertAttributeMissing("class", json);
		assertAttribute("phones", json);
		assertAttributeMissing("birthdate", json);
	}

	@Test
	public void testCopyOnWriteList() {
		CopyOnWriteArrayList<Person> people = new CopyOnWriteArrayList<>();
		people.add(jodder);
		people.add(modesty);

		String json = new JsonSerializer().serialize(people);
		assertAttribute("firstname", json);
		assertStringValue("Igor", json);
		assertStringValue("Modesty", json);
	}

	// @JSON annotations on TestClass2/TestClass3 control default visibility of properties.
	@Test
	public void testAnnotations() {
		HashMap<String, TestClass3> map = new HashMap<>();
		map.put("String1", new TestClass3());
		TestClass2 testElement = new TestClass2();
		testElement.setMapOfJustice(map);

		String json = new JsonSerializer().serialize(testElement);

		assertAttributeMissing("mapOfJustice", json);
		assertAttributeMissing("name", json);
		assertEquals(-1, json.indexOf("testName2"));

		json = new JsonSerializer().include("mapOfJustice").serialize(testElement);

		assertAttribute("mapOfJustice", json);
		// make sure the name property value is missing!
assertAttributeMissing( "name", json ) // conflicts since mapOfJustice contains an object with name in it assertEquals(-1, json.indexOf("testName2")); } @Test public void testTransient() { TestClass2 testElement = new TestClass2(); String json = new JsonSerializer().serialize(testElement); assertAttributeMissing("description", json); json = new JsonSerializer().include("description").serialize(testElement); assertAttribute("description", json); } @Test public void testSettersWithoutGettersAreMissing() { Friend friend = new Friend("Nugget", "Donkey Rider", "Slim"); String json = new JsonSerializer().include("*").serialize(friend); assertAttribute("nicknames", json); assertAttributeMissing("nicknamesAsArray", json); } @Test public void testIncludesExcludes() throws FileNotFoundException { Surfer surfer = Surfer.createSurfer(); String json = new JsonSerializer().serialize(surfer); assertAttribute("name", json); assertStringValue("jodd", json); assertAttribute("id", json); assertAttribute("split", json); assertAttribute("skill", json); assertAttribute("pipe", json); assertAttributeMissing("phones", json); // exclude pipe json = new JsonSerializer().excludeTypes(InputStream.class).serialize(surfer); assertAttribute("name", json); assertStringValue("jodd", json); assertAttribute("id", json); assertAttribute("split", json); assertAttribute("skill", json); assertAttributeMissing("pipe", json); assertAttributeMissing("phones", json); // exclude pipe (alt) json = new JsonSerializer().excludeTypes("*Stream").serialize(surfer); assertAttribute("name", json); assertStringValue("jodd", json); assertAttribute("id", json); assertAttribute("split", json); assertAttribute("skill", json); assertAttributeMissing("pipe", json); assertAttributeMissing("phones", json); // exclude s*, include phones json = new JsonSerializer().exclude("split").include("phones").excludeTypes("*Stream").serialize(surfer); assertAttribute("name", json); assertStringValue("jodd", json); assertAttribute("id", 
json); assertAttributeMissing("split", json); assertAttribute("skill", json); assertAttributeMissing("pipe", json); assertAttribute("phones", json); assertAttribute("exchange", json); json = new JsonSerializer().exclude("split").include("phones").exclude("phones.exchange").serialize(surfer); assertAttribute("phones", json); assertAttributeMissing("exchange", json); } @Test public void testSuperclass() { Hill hill = new Hill(); hill.setHeight("qwe"); hill.setName("aaa"); String json = new JsonSerializer().serialize(hill); assertAttribute("height", json); assertAttributeMissing("name", json); Mountain mountain = new Mountain(); mountain.setName("bbb"); mountain.setHeight("123"); mountain.setWild(true); JoddJson.serializationSubclassAware = false; JoddJson.annotationManager.reset(); json = new JsonSerializer().serialize(mountain); assertAttribute("height", json); assertAttribute("name", json); assertAttribute("wild", json); JoddJson.serializationSubclassAware = true; JoddJson.annotationManager.reset(); json = new JsonSerializer().serialize(mountain); assertAttribute("height", json); assertAttributeMissing("name", json); assertAttributeMissing("wild", json); } public static class Lucy { String name = "Lucy"; String address = null; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getAddress() { return address; } public void setAddress(String address) { this.address = address; } } @Test public void testSerializeWithCustomBeanSerializer() { JsonSerializer jsonSerializer = new JsonSerializer(); Lucy lucy = new Lucy(); String json = jsonSerializer.serialize(lucy); assertAttribute("address", json); assertAttribute("name", json); jsonSerializer.use(Object.class, new ObjectJsonSerializer() { public void serializeValue(final JsonContext jsonContext, Object value) { jsonContext.writeOpenObject(); BeanSerializer beanVisitor = new BeanSerializer(jsonContext, value) { @Override protected void onSerializableProperty(String 
propertyName, Class propertyType, Object value) { if (value == null) { return; } super.onSerializableProperty(propertyName, propertyType, value); } }; beanVisitor.serialize(); jsonContext.writeCloseObject(); } }); json = jsonSerializer.serialize(lucy); assertEquals("{\"name\":\"Lucy\"}", json); } // ---------------------------------------------------------------- custom asserts private int occurs(String str, String json) { int current = 0; int count = 0; while (current >= 0) { current = json.indexOf(str, current); if (current > 0) { count++; current += str.length(); } } return count; } private void assertAttributeMissing(String attribute, String json) { assertAttribute(attribute, json, false); } private void assertAttribute(String attribute, String json) { assertAttribute(attribute, json, true); } private void assertAttribute(String attribute, String json, boolean isPresent) { if (isPresent) { assertTrue(json.contains("\"" + attribute + "\":")); } else { assertFalse(json.contains("\"" + attribute + "\":")); } } private void assertStringValue(String value, String json, boolean isPresent) { if (isPresent) { assertTrue(json.contains("\"" + value + "\"")); } else { assertFalse(json.contains("\"" + value + "\"")); } } private void assertNumber(Number number, String json) { assertTrue(json.contains(number.toString())); } private void assertStringValueMissing(String value, String json) { assertStringValue(value, json, false); } private void assertStringValue(String value, String json) { assertStringValue(value, json, true); } private void assertSerializedTo(String original, String expected) { JsonSerializer serializer = new JsonSerializer(); String json = serializer.serialize(original); assertEquals(expected, json); } }
/* * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.examples.marketdata; import static com.opengamma.strata.collect.Guavate.toImmutableList; import java.io.File; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Path; import java.time.LocalDate; import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.SortedMap; import java.util.TreeMap; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.collect.ListMultimap; import com.google.common.collect.Maps; import com.opengamma.strata.basics.currency.Currency; import com.opengamma.strata.basics.currency.FxRate; import com.opengamma.strata.collect.Messages; import com.opengamma.strata.collect.io.ResourceLocator; import com.opengamma.strata.collect.timeseries.LocalDateDoubleTimeSeries; import com.opengamma.strata.data.FxRateId; import com.opengamma.strata.data.ImmutableMarketData; import com.opengamma.strata.data.ImmutableMarketDataBuilder; import com.opengamma.strata.data.ObservableId; import com.opengamma.strata.loader.csv.FixingSeriesCsvLoader; import com.opengamma.strata.loader.csv.QuotesCsvLoader; import com.opengamma.strata.loader.csv.RatesCurvesCsvLoader; import com.opengamma.strata.market.curve.CurveId; import com.opengamma.strata.market.curve.RatesCurveGroup; import com.opengamma.strata.market.observable.QuoteId; import com.opengamma.strata.measure.rate.RatesMarketDataLookup; /** * Builds a market data snapshot from user-editable files in a prescribed directory structure. * <p> * Descendants of this class provide the ability to source this directory structure from any * location. 
* <p> * The directory structure must look like: * <ul> * <li>root * <ul> * <li>curves * <ul> * <li>groups.csv * <li>settings.csv * <li>one or more curve CSV files * </ul> * <li>historical-fixings * <ul> * <li>one or more time-series CSV files * </ul> * </ul> * </ul> */ public abstract class ExampleMarketDataBuilder { private static final Logger log = LoggerFactory.getLogger(ExampleMarketDataBuilder.class); /** The name of the subdirectory containing historical fixings. */ private static final String HISTORICAL_FIXINGS_DIR = "historical-fixings"; /** The name of the subdirectory containing calibrated rates curves. */ private static final String CURVES_DIR = "curves"; /** The name of the curve groups file. */ private static final String CURVES_GROUPS_FILE = "groups.csv"; /** The name of the curve settings file. */ private static final String CURVES_SETTINGS_FILE = "settings.csv"; /** The name of the subdirectory containing simple market quotes. */ private static final String QUOTES_DIR = "quotes"; /** The name of the quotes file. */ private static final String QUOTES_FILE = "quotes.csv"; //------------------------------------------------------------------------- /** * Creates an instance from a given classpath resource root location using the class loader * which created this class. * <p> * This is designed to handle resource roots which may physically correspond to a directory on * disk, or be located within a jar file. * * @param resourceRoot the resource root path * @return the market data builder */ public static ExampleMarketDataBuilder ofResource(String resourceRoot) { return ofResource(resourceRoot, ExampleMarketDataBuilder.class.getClassLoader()); } /** * Creates an instance from a given classpath resource root location, using the given class loader * to find the resource. * <p> * This is designed to handle resource roots which may physically correspond to a directory on * disk, or be located within a jar file. 
* * @param resourceRoot the resource root path * @param classLoader the class loader with which to find the resource * @return the market data builder */ public static ExampleMarketDataBuilder ofResource(String resourceRoot, ClassLoader classLoader) { // classpath resources are forward-slash separated String qualifiedRoot = resourceRoot; qualifiedRoot = qualifiedRoot.startsWith("/") ? qualifiedRoot.substring(1) : qualifiedRoot; qualifiedRoot = qualifiedRoot.startsWith("\\") ? qualifiedRoot.substring(1) : qualifiedRoot; qualifiedRoot = qualifiedRoot.endsWith("/") ? qualifiedRoot : qualifiedRoot + "/"; URL url = classLoader.getResource(qualifiedRoot); if (url == null) { throw new IllegalArgumentException(Messages.format("Classpath resource not found: {}", qualifiedRoot)); } if (url.getProtocol() != null && "jar".equals(url.getProtocol().toLowerCase(Locale.ENGLISH))) { // Inside a JAR int classSeparatorIdx = url.getFile().indexOf("!"); if (classSeparatorIdx == -1) { throw new IllegalArgumentException(Messages.format("Unexpected JAR file URL: {}", url)); } String jarPath = url.getFile().substring("file:".length(), classSeparatorIdx); File jarFile; try { jarFile = new File(jarPath); } catch (Exception e) { throw new IllegalArgumentException(Messages.format("Unable to create file for JAR: {}", jarPath), e); } return new JarMarketDataBuilder(jarFile, resourceRoot); } else { // Resource is on disk File file; try { file = new File(url.toURI()); } catch (URISyntaxException e) { throw new IllegalArgumentException(Messages.format("Unexpected file location: {}", url), e); } return new DirectoryMarketDataBuilder(file.toPath()); } } /** * Creates an instance from a given directory root. 
* * @param rootPath the root directory * @return the market data builder */ public static ExampleMarketDataBuilder ofPath(Path rootPath) { return new DirectoryMarketDataBuilder(rootPath); } //------------------------------------------------------------------------- /** * Builds a market data snapshot from this environment. * * @param marketDataDate the date of the market data * @return the snapshot */ public ImmutableMarketData buildSnapshot(LocalDate marketDataDate) { ImmutableMarketDataBuilder builder = ImmutableMarketData.builder(marketDataDate); loadFixingSeries(builder); loadRatesCurves(builder, marketDataDate); loadQuotes(builder, marketDataDate); loadFxRates(builder); return builder.build(); } /** * Gets the rates market lookup to use with this environment. * * @param marketDataDate the date of the market data * @return the rates lookup */ public RatesMarketDataLookup ratesLookup(LocalDate marketDataDate) { SortedMap<LocalDate, RatesCurveGroup> curves = loadAllRatesCurves(); return RatesMarketDataLookup.of(curves.get(marketDataDate)); } /** * Gets all rates curves. 
* * @return the map of all rates curves */ public SortedMap<LocalDate, RatesCurveGroup> loadAllRatesCurves() { if (!subdirectoryExists(CURVES_DIR)) { throw new IllegalArgumentException("No rates curves directory found"); } ResourceLocator curveGroupsResource = getResource(CURVES_DIR, CURVES_GROUPS_FILE); if (curveGroupsResource == null) { throw new IllegalArgumentException(Messages.format( "Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE)); } ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE); if (curveSettingsResource == null) { throw new IllegalArgumentException(Messages.format( "Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE)); } ListMultimap<LocalDate, RatesCurveGroup> curveGroups = RatesCurvesCsvLoader.loadAllDates(curveGroupsResource, curveSettingsResource, getRatesCurvesResources()); // There is only one curve group in the market data file so this will always succeed Map<LocalDate, RatesCurveGroup> curveGroupMap = Maps.transformValues(curveGroups.asMap(), groups -> groups.iterator().next()); return new TreeMap<>(curveGroupMap); } //------------------------------------------------------------------------- private void loadFixingSeries(ImmutableMarketDataBuilder builder) { if (!subdirectoryExists(HISTORICAL_FIXINGS_DIR)) { log.debug("No historical fixings directory found"); return; } try { Collection<ResourceLocator> fixingSeriesResources = getAllResources(HISTORICAL_FIXINGS_DIR); Map<ObservableId, LocalDateDoubleTimeSeries> fixingSeries = FixingSeriesCsvLoader.load(fixingSeriesResources); builder.addTimeSeriesMap(fixingSeries); } catch (Exception e) { log.error("Error loading fixing series", e); } } private void loadRatesCurves(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) { if (!subdirectoryExists(CURVES_DIR)) { log.debug("No rates curves directory found"); return; } ResourceLocator curveGroupsResource 
= getResource(CURVES_DIR, CURVES_GROUPS_FILE); if (curveGroupsResource == null) { log.error("Unable to load rates curves: curve groups file not found at {}/{}", CURVES_DIR, CURVES_GROUPS_FILE); return; } ResourceLocator curveSettingsResource = getResource(CURVES_DIR, CURVES_SETTINGS_FILE); if (curveSettingsResource == null) { log.error("Unable to load rates curves: curve settings file not found at {}/{}", CURVES_DIR, CURVES_SETTINGS_FILE); return; } try { Collection<ResourceLocator> curvesResources = getRatesCurvesResources(); List<RatesCurveGroup> ratesCurves = RatesCurvesCsvLoader.load(marketDataDate, curveGroupsResource, curveSettingsResource, curvesResources); for (RatesCurveGroup group : ratesCurves) { // add entry for higher level discount curve name group.getDiscountCurves().forEach( (ccy, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve)); // add entry for higher level forward curve name group.getForwardCurves().forEach( (idx, curve) -> builder.addValue(CurveId.of(group.getName(), curve.getName()), curve)); } } catch (Exception e) { log.error("Error loading rates curves", e); } } // load quotes private void loadQuotes(ImmutableMarketDataBuilder builder, LocalDate marketDataDate) { if (!subdirectoryExists(QUOTES_DIR)) { log.debug("No quotes directory found"); return; } ResourceLocator quotesResource = getResource(QUOTES_DIR, QUOTES_FILE); if (quotesResource == null) { log.error("Unable to load quotes: quotes file not found at {}/{}", QUOTES_DIR, QUOTES_FILE); return; } try { Map<QuoteId, Double> quotes = QuotesCsvLoader.load(marketDataDate, quotesResource); builder.addValueMap(quotes); } catch (Exception ex) { log.error("Error loading quotes", ex); } } private void loadFxRates(ImmutableMarketDataBuilder builder) { // TODO - load from CSV file - format to be defined builder.addValue(FxRateId.of(Currency.GBP, Currency.USD), FxRate.of(Currency.GBP, Currency.USD, 1.61)); } 
//------------------------------------------------------------------------- private Collection<ResourceLocator> getRatesCurvesResources() { return getAllResources(CURVES_DIR).stream() .filter(res -> !res.getLocator().endsWith(CURVES_GROUPS_FILE)) .filter(res -> !res.getLocator().endsWith(CURVES_SETTINGS_FILE)) .collect(toImmutableList()); } //------------------------------------------------------------------------- /** * Gets all available resources from a given subdirectory. * * @param subdirectoryName the name of the subdirectory * @return a collection of locators for the resources in the subdirectory */ protected abstract Collection<ResourceLocator> getAllResources(String subdirectoryName); /** * Gets a specific resource from a given subdirectory. * * @param subdirectoryName the name of the subdirectory * @param resourceName the name of the resource * @return a locator for the requested resource */ protected abstract ResourceLocator getResource(String subdirectoryName, String resourceName); /** * Checks whether a specific subdirectory exists. * * @param subdirectoryName the name of the subdirectory * @return whether the subdirectory exists */ protected abstract boolean subdirectoryExists(String subdirectoryName); }
package org.linkeddatafragments.util; import org.linkeddatafragments.exceptions.NoRegisteredMimeTypesException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang.math.NumberUtils; /** * MIME-Type Parser * * This class provides basic functions for handling mime-types. It can handle * matching mime-types against a list of media-ranges. See section 14.1 of the * HTTP specification [RFC 2616] for a complete explanation. * * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1 * * A port to Java of Joe Gregorio's MIME-Type Parser: * * http://code.google.com/p/mimeparse/ * * Ported by <a href="mailto:tzellman@gmail.com">Tom Zellman</a>. * Extended by <a href="mailto:miel.vandersande@ugent.be">Miel Vander Sande</a> * */ public final class MIMEParse { private final static List<String> mimeTypes = new ArrayList<>(); /** * Register mimeType in collection * @param mimeType */ public static void register(String mimeType) { mimeTypes.add(mimeType); } /** * Parse results container */ protected static class ParseResults { String type; String subType; // !a dictionary of all the parameters for the media range Map<String, String> params; @Override public String toString() { StringBuffer s = new StringBuffer("('" + type + "', '" + subType + "', {"); for (String k : params.keySet()) s.append("'" + k + "':'" + params.get(k) + "',"); return s.append("})").toString(); } } /** * Carves up a mime-type and returns a ParseResults object * * For example, the media range 'application/xhtml;q=0.5' would get parsed * into: * * ('application', 'xhtml', {'q', '0.5'}) * @param mimeType * @return */ protected static ParseResults parseMimeType(String mimeType) { String[] parts = StringUtils.split(mimeType, ";"); ParseResults results = new ParseResults(); results.params = new 
HashMap<String, String>(); for (int i = 1; i < parts.length; ++i) { String p = parts[i]; String[] subParts = StringUtils.split(p, '='); if (subParts.length == 2) results.params.put(subParts[0].trim(), subParts[1].trim()); } String fullType = parts[0].trim(); // Java URLConnection class sends an Accept header that includes a // single "*" - Turn it into a legal wildcard. if (fullType.equals("*")) fullType = "*/*"; String[] types = StringUtils.split(fullType, "/"); results.type = types[0].trim(); results.subType = types[1].trim(); return results; } /** * Carves up a media range and returns a ParseResults. * * For example, the media range 'application/*;q=0.5' would get parsed into: * * ('application', '*', {'q', '0.5'}) * * In addition this function also guarantees that there is a value for 'q' * in the params dictionary, filling it in with a proper default if * necessary. * * @param range * @return */ protected static ParseResults parseMediaRange(String range) { ParseResults results = parseMimeType(range); String q = results.params.get("q"); float f = NumberUtils.toFloat(q, 1); if (StringUtils.isBlank(q) || f < 0 || f > 1) results.params.put("q", "1"); return results; } /** * Structure for holding a fitness/quality combo */ protected static class FitnessAndQuality implements Comparable<FitnessAndQuality> { int fitness; float quality; String mimeType; // optionally used /** * * @param fitness * @param quality */ public FitnessAndQuality(int fitness, float quality) { this.fitness = fitness; this.quality = quality; } public int compareTo(FitnessAndQuality o) { if (fitness == o.fitness) { if (quality == o.quality) return 0; else return quality < o.quality ? -1 : 1; } else return fitness < o.fitness ? -1 : 1; } } /** * Find the best match for a given mimeType against a list of media_ranges * that have already been parsed by MimeParse.parseMediaRange(). 
Returns a * tuple of the fitness value and the value of the 'q' quality parameter of * the best match, or (-1, 0) if no match was found. Just as for * quality_parsed(), 'parsed_ranges' must be a list of parsed media ranges. * * @param mimeType * @param parsedRanges * @return */ protected static FitnessAndQuality fitnessAndQualityParsed(String mimeType, Collection<ParseResults> parsedRanges) { int bestFitness = -1; float bestFitQ = 0; ParseResults target = parseMediaRange(mimeType); for (ParseResults range : parsedRanges) { if ((target.type.equals(range.type) || range.type.equals("*") || target.type .equals("*")) && (target.subType.equals(range.subType) || range.subType.equals("*") || target.subType .equals("*"))) { for (String k : target.params.keySet()) { int paramMatches = 0; if (!k.equals("q") && range.params.containsKey(k) && target.params.get(k).equals(range.params.get(k))) { paramMatches++; } int fitness = (range.type.equals(target.type)) ? 100 : 0; fitness += (range.subType.equals(target.subType)) ? 10 : 0; fitness += paramMatches; if (fitness > bestFitness) { bestFitness = fitness; bestFitQ = NumberUtils .toFloat(range.params.get("q"), 0); } } } } return new FitnessAndQuality(bestFitness, bestFitQ); } /** * Find the best match for a given mime-type against a list of ranges that * have already been parsed by parseMediaRange(). Returns the 'q' quality * parameter of the best match, 0 if no match was found. This function * bahaves the same as quality() except that 'parsed_ranges' must be a list * of parsed media ranges. * * @param mimeType * @param parsedRanges * @return */ protected static float qualityParsed(String mimeType, Collection<ParseResults> parsedRanges) { return fitnessAndQualityParsed(mimeType, parsedRanges).quality; } /** * Returns the quality 'q' of a mime-type when compared against the * mediaRanges in ranges. 
For example: * * @param mimeType * @param ranges * @return */ public static float quality(String mimeType, String ranges) { List<ParseResults> results = new LinkedList<ParseResults>(); for (String r : StringUtils.split(ranges, ',')) results.add(parseMediaRange(r)); return qualityParsed(mimeType, results); } /** * Takes a list of supported mime-types and finds the best match for all the * media-ranges listed in header. The value of header must be a string that * conforms to the format of the HTTP Accept: header. The value of * 'supported' is a list of mime-types. * * MimeParse.bestMatch(Arrays.asList(new String[]{"application/xbel+xml", * "text/xml"}), "text/*;q=0.5,*; q=0.1") 'text/xml' * * @param supported * @param header * @return * @throws org.linkeddatafragments.exceptions.NoRegisteredMimeTypesException */ public static String bestMatch(List<String> supported, String header) throws NoRegisteredMimeTypesException { if (supported.isEmpty()) throw new NoRegisteredMimeTypesException(); List<ParseResults> parseResults = new LinkedList<ParseResults>(); List<FitnessAndQuality> weightedMatches = new LinkedList<FitnessAndQuality>(); for (String r : StringUtils.split(header, ',')) parseResults.add(parseMediaRange(r)); for (String s : supported) { FitnessAndQuality fitnessAndQuality = fitnessAndQualityParsed(s, parseResults); fitnessAndQuality.mimeType = s; weightedMatches.add(fitnessAndQuality); } Collections.sort(weightedMatches); FitnessAndQuality lastOne = weightedMatches .get(weightedMatches.size() - 1); return NumberUtils.compare(lastOne.quality, 0) != 0 ? lastOne.mimeType : supported.get(0); } /** * * @param header * @return * @throws NoRegisteredMimeTypesException */ public static String bestMatch(String header) throws NoRegisteredMimeTypesException { return bestMatch(mimeTypes, header); } // hidden private MIMEParse() { } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.caffeine.cache.springboot;

import javax.annotation.Generated;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.RemovalListener;
import com.github.benmanes.caffeine.cache.stats.StatsCounter;
import org.apache.camel.component.caffeine.EvictionType;
import org.apache.camel.component.caffeine.cache.CaffeineCacheComponent;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * The caffeine-cache component is used for integration with Caffeine Cache.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.caffeine-cache")
public class CaffeineCacheComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Whether to enable auto configuration of the caffeine-cache component.
     * This is enabled by default.
     */
    private Boolean enabled;
    /**
     * Sets the global component configuration
     */
    private CaffeineConfigurationNestedConfiguration configuration;
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    private Boolean resolvePropertyPlaceholders = true;

    public CaffeineConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            CaffeineConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }

    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }

    public static class CaffeineConfigurationNestedConfiguration {
        // mirror of the Camel-level configuration class that these nested
        // Spring Boot properties are mapped onto
        public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.caffeine.CaffeineConfiguration.class;
        /**
         * Configure if a cache needs to be created if it does not exist or
         * can't be pre-configured.
         */
        private Boolean createCacheIfNotExist = true;
        /**
         * To configure the default cache action. If an action is set in the
         * message header, then the operation from the header takes precedence.
         */
        private String action;
        /**
         * To configure the default action key. If a key is set in the message
         * header, then the key from the header takes precedence.
         */
        private Object key;
        /**
         * The cache key type, default java.lang.Object
         */
        private String keyType = "java.lang.Object";
        /**
         * The cache value type, default java.lang.Object
         */
        private String valueType = "java.lang.Object";
        /**
         * To configure an already instantiated cache to be used
         */
        private Cache cache;
        /**
         * To configure a CacheLoader in case of a LoadingCache use
         */
        private CacheLoader cacheLoader;
        /**
         * To enable stats on the cache
         */
        private Boolean statsEnabled = false;
        /**
         * Set the initial Capacity for the cache
         */
        private Integer initialCapacity = 10000;
        /**
         * Set the maximum size for the cache
         */
        private Integer maximumSize = 10000;
        /**
         * Set the eviction Type for this cache
         */
        private EvictionType evictionType = EvictionType.SIZE_BASED;
        /**
         * Set the expire-after-access time in case of time based Eviction (in
         * seconds)
         */
        private Integer expireAfterAccessTime = 300;
        /**
         * Set the expire-after-write time in case of time based Eviction (in
         * seconds)
         */
        private Integer expireAfterWriteTime = 300;
        /**
         * Set a specific removal Listener for the cache
         */
        private RemovalListener removalListener;
        /**
         * Set a specific Stats Counter for the cache stats
         */
        private StatsCounter statsCounter;

        public Boolean getCreateCacheIfNotExist() {
            return createCacheIfNotExist;
        }

        public void setCreateCacheIfNotExist(Boolean createCacheIfNotExist) {
            this.createCacheIfNotExist = createCacheIfNotExist;
        }

        public String getAction() {
            return action;
        }

        public void setAction(String action) {
            this.action = action;
        }

        public Object getKey() {
            return key;
        }

        public void setKey(Object key) {
            this.key = key;
        }

        public String getKeyType() {
            return keyType;
        }

        public void setKeyType(String keyType) {
            this.keyType = keyType;
        }

        public String getValueType() {
            return valueType;
        }

        public void setValueType(String valueType) {
            this.valueType = valueType;
        }

        public Cache getCache() {
            return cache;
        }

        public void setCache(Cache cache) {
            this.cache = cache;
        }

        public CacheLoader getCacheLoader() {
            return cacheLoader;
        }

        public void setCacheLoader(CacheLoader cacheLoader) {
            this.cacheLoader = cacheLoader;
        }

        public Boolean getStatsEnabled() {
            return statsEnabled;
        }

        public void setStatsEnabled(Boolean statsEnabled) {
            this.statsEnabled = statsEnabled;
        }

        public Integer getInitialCapacity() {
            return initialCapacity;
        }

        public void setInitialCapacity(Integer initialCapacity) {
            this.initialCapacity = initialCapacity;
        }

        public Integer getMaximumSize() {
            return maximumSize;
        }

        public void setMaximumSize(Integer maximumSize) {
            this.maximumSize = maximumSize;
        }

        public EvictionType getEvictionType() {
            return evictionType;
        }

        public void setEvictionType(EvictionType evictionType) {
            this.evictionType = evictionType;
        }

        public Integer getExpireAfterAccessTime() {
            return expireAfterAccessTime;
        }

        public void setExpireAfterAccessTime(Integer expireAfterAccessTime) {
            this.expireAfterAccessTime = expireAfterAccessTime;
        }

        public Integer getExpireAfterWriteTime() {
            return expireAfterWriteTime;
        }

        public void setExpireAfterWriteTime(Integer expireAfterWriteTime) {
            this.expireAfterWriteTime = expireAfterWriteTime;
        }

        public RemovalListener getRemovalListener() {
            return removalListener;
        }

        public void setRemovalListener(RemovalListener removalListener) {
            this.removalListener = removalListener;
        }

        public StatsCounter getStatsCounter() {
            return statsCounter;
        }

        public void setStatsCounter(StatsCounter statsCounter) {
            this.statsCounter = statsCounter;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht.colocated; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collection; import java.util.Deque; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; import java.util.concurrent.atomic.AtomicReference; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteLogger; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.IgniteDiagnosticAware; import org.apache.ignite.internal.IgniteDiagnosticPrepareContext; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException; import org.apache.ignite.internal.cluster.ClusterTopologyServerNotFoundException; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.CacheEntryPredicate; import org.apache.ignite.internal.processors.cache.CacheObject; import 
org.apache.ignite.internal.processors.cache.CacheStoppedException; import org.apache.ignite.internal.processors.cache.GridCacheCompoundIdentityFuture; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import org.apache.ignite.internal.processors.cache.GridCacheLockTimeoutException; import org.apache.ignite.internal.processors.cache.GridCacheMvccCandidate; import org.apache.ignite.internal.processors.cache.GridCacheVersionedFuture; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedCacheEntry; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtCacheEntry; import org.apache.ignite.internal.processors.cache.distributed.dht.GridDhtTopologyFuture; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockMapping; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockRequest; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearLockResponse; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxLocal; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxEntry; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxKey; import org.apache.ignite.internal.processors.cache.transactions.TxDeadlock; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.processors.timeout.GridTimeoutObjectAdapter; import org.apache.ignite.internal.transactions.IgniteTxTimeoutCheckedException; import org.apache.ignite.internal.util.future.GridEmbeddedFuture; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.tostring.GridToStringExclude; import 
org.apache.ignite.internal.util.tostring.GridToStringInclude; import org.apache.ignite.internal.util.typedef.C1; import org.apache.ignite.internal.util.typedef.C2; import org.apache.ignite.internal.util.typedef.CI1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteBiTuple; import org.apache.ignite.lang.IgniteInClosure; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.transactions.TransactionDeadlockException; import org.apache.ignite.transactions.TransactionIsolation; import org.jetbrains.annotations.Nullable; import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC; import static org.apache.ignite.events.EventType.EVT_CACHE_OBJECT_READ; /** * Colocated cache lock future. */ public final class GridDhtColocatedLockFuture extends GridCacheCompoundIdentityFuture<Boolean> implements GridCacheVersionedFuture<Boolean>, IgniteDiagnosticAware { /** */ private static final long serialVersionUID = 0L; /** Logger reference. */ private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>(); /** Logger. */ private static IgniteLogger log; /** Logger. */ private static IgniteLogger msgLog; /** Done field updater. */ private static final AtomicIntegerFieldUpdater<GridDhtColocatedLockFuture> DONE_UPD = AtomicIntegerFieldUpdater.newUpdater(GridDhtColocatedLockFuture.class, "done"); /** Cache registry. */ @GridToStringExclude private final GridCacheContext<?, ?> cctx; /** Lock owner thread. */ @GridToStringInclude private final long threadId; /** Keys to lock. */ @GridToStringInclude private Collection<KeyCacheObject> keys; /** Future ID. */ private final IgniteUuid futId; /** Lock version. */ private final GridCacheVersion lockVer; /** Read flag. */ private final boolean read; /** Flag to return value. 
*/ private final boolean retval; /** Error. */ private volatile Throwable err; /** Timeout object. */ @GridToStringExclude private volatile LockTimeoutObject timeoutObj; /** Lock timeout. */ private final long timeout; /** Filter. */ private final CacheEntryPredicate[] filter; /** Transaction. */ @GridToStringExclude private final GridNearTxLocal tx; /** Topology snapshot to operate on. */ private volatile AffinityTopologyVersion topVer; /** Map of current values. */ private final Map<KeyCacheObject, IgniteBiTuple<GridCacheVersion, CacheObject>> valMap; /** */ @SuppressWarnings("UnusedDeclaration") private volatile int done; /** Trackable flag (here may be non-volatile). */ private boolean trackable; /** TTL for create operation. */ private final long createTtl; /** TTL for read operation. */ private final long accessTtl; /** Skip store flag. */ private final boolean skipStore; /** */ private Deque<GridNearLockMapping> mappings; /** Keep binary. */ private final boolean keepBinary; /** */ private final boolean recovery; /** */ private int miniId; /** * @param cctx Registry. * @param keys Keys to lock. * @param tx Transaction. * @param read Read flag. * @param retval Flag to return value or not. * @param timeout Lock acquisition timeout. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param filter Filter. * @param skipStore Skip store flag. 
*/ public GridDhtColocatedLockFuture( GridCacheContext<?, ?> cctx, Collection<KeyCacheObject> keys, @Nullable GridNearTxLocal tx, boolean read, boolean retval, long timeout, long createTtl, long accessTtl, CacheEntryPredicate[] filter, boolean skipStore, boolean keepBinary, boolean recovery ) { super(CU.boolReducer()); assert keys != null; this.cctx = cctx; this.keys = keys; this.tx = tx; this.read = read; this.retval = retval; this.timeout = timeout; this.createTtl = createTtl; this.accessTtl = accessTtl; this.filter = filter; this.skipStore = skipStore; this.keepBinary = keepBinary; this.recovery = recovery; ignoreInterrupts(); threadId = tx == null ? Thread.currentThread().getId() : tx.threadId(); lockVer = tx != null ? tx.xidVersion() : cctx.versions().next(); futId = IgniteUuid.randomUuid(); if (log == null) { msgLog = cctx.shared().txLockMessageLogger(); log = U.logger(cctx.kernalContext(), logRef, GridDhtColocatedLockFuture.class); } valMap = new ConcurrentHashMap<>(); if (tx != null && !tx.updateLockFuture(null, this)) { onError(tx.timedOut() ? tx.timeoutException() : tx.rollbackException()); onComplete(false, false); } } /** {@inheritDoc} */ @Override public GridCacheVersion version() { return lockVer; } /** {@inheritDoc} */ @Override public boolean onOwnerChanged(GridCacheEntryEx entry, GridCacheMvccCandidate owner) { return false; } /** * @return Future ID. */ @Override public IgniteUuid futureId() { return futId; } /** {@inheritDoc} */ @Override public boolean trackable() { return trackable; } /** {@inheritDoc} */ @Override public void markNotTrackable() { trackable = false; } /** * @return {@code True} if transaction is not {@code null}. */ private boolean inTx() { return tx != null; } /** * @return {@code True} if implicit-single-tx flag is set. */ private boolean implicitSingleTx() { return tx != null && tx.implicitSingle(); } /** * @return {@code True} if transaction is not {@code null} and has invalidate flag set. 
*/ private boolean isInvalidate() { return tx != null && tx.isInvalidate(); } /** * @return Transaction isolation or {@code null} if no transaction. */ @Nullable private TransactionIsolation isolation() { return tx == null ? null : tx.isolation(); } /** * @return {@code true} if related transaction is implicit. */ private boolean implicitTx() { return tx != null && tx.implicit(); } /** * Adds entry to future. * * @param entry Entry to add. * @return Non-reentry candidate if lock should be acquired on remote node, * reentry candidate if locks has been already acquired and {@code null} if explicit locks is held and * implicit transaction accesses locked entry. * @throws IgniteCheckedException If failed to add entry due to external locking. */ @Nullable private GridCacheMvccCandidate addEntry(GridDistributedCacheEntry entry) throws IgniteCheckedException { IgniteTxKey txKey = entry.txKey(); GridCacheMvccCandidate cand = cctx.mvcc().explicitLock(threadId, txKey); if (inTx()) { if (cand != null) { if (!tx.implicit()) throw new IgniteCheckedException("Cannot access key within transaction if lock is " + "externally held [key=" + entry.key() + ", entry=" + entry + ']'); else return null; } else { IgniteTxEntry txEntry = tx.entry(txKey); assert txEntry != null; txEntry.cached(entry); // Check transaction entries (corresponding tx entries must be enlisted in transaction). cand = new GridCacheMvccCandidate(entry, cctx.localNodeId(), null, null, threadId, lockVer, true, txEntry.locked(), inTx(), inTx() && tx.implicitSingle(), false, false, null, false); cand.topologyVersion(topVer); } } else { if (cand == null) { cand = new GridCacheMvccCandidate(entry, cctx.localNodeId(), null, null, threadId, lockVer, true, false, inTx(), inTx() && tx.implicitSingle(), false, false, null, false); cand.topologyVersion(topVer); } else cand = cand.reenter(); cctx.mvcc().addExplicitLock(threadId, cand, topVer); } return cand; } /** * Undoes all locks. 
* * @param dist If {@code true}, then remove locks from remote nodes as well. * @param rollback {@code True} if should rollback tx. */ private void undoLocks(boolean dist, boolean rollback) { // Transactions will undo during rollback. if (dist && tx == null) cctx.colocated().removeLocks(threadId, lockVer, keys); else { if (rollback && tx != null) { if (tx.setRollbackOnly()) { if (log.isDebugEnabled()) log.debug("Marked transaction as rollback only because locks could not be acquired: " + tx); } else if (log.isDebugEnabled()) log.debug("Transaction was not marked rollback-only while locks were not acquired: " + tx); } } cctx.mvcc().recheckPendingLocks(); } /** * @param success Success flag. */ public void complete(boolean success) { onComplete(success, true); } /** * @param nodeId Left node ID * @return {@code True} if node was in the list. */ @Override public boolean onNodeLeft(UUID nodeId) { boolean found = false; for (IgniteInternalFuture<?> fut : futures()) { if (isMini(fut)) { MiniFuture f = (MiniFuture)fut; if (f.node().id().equals(nodeId)) { f.onResult(newTopologyException(null, nodeId)); found = true; } } } if (log.isDebugEnabled()) log.debug("Near lock future does not have mapping for left node (ignoring) [nodeId=" + nodeId + ", fut=" + this + ']'); return found; } /** * @param nodeId Sender. * @param res Result. */ @SuppressWarnings("SynchronizeOnNonFinalField") void onResult(UUID nodeId, GridNearLockResponse res) { boolean done = isDone(); if (!done) { // onResult is always called after map() and timeoutObj is never reset to null, so this is // a race-free null check. if (timeoutObj == null) { onResult0(nodeId, res); return; } synchronized (timeoutObj) { if (!isDone()) { if (onResult0(nodeId, res)) return; } else done = true; } } if (done && msgLog.isDebugEnabled()) { msgLog.debug("Collocated lock fut, response for finished future [txId=" + lockVer + ", inTx=" + inTx() + ", node=" + nodeId + ']'); } } /** * @param nodeId Sender. * @param res Result. 
*/ private boolean onResult0(UUID nodeId, GridNearLockResponse res) { MiniFuture mini = miniFuture(res.miniId()); if (mini != null) { assert mini.node().id().equals(nodeId); mini.onResult(res); return true; } // This warning can be triggered by deadlock detection code which clears pending futures. U.warn(msgLog, "Collocated lock fut, failed to find mini future [txId=" + lockVer + ", tx=" + (inTx() ? CU.txString(tx) : "N/A") + ", node=" + nodeId + ", res=" + res + ", fut=" + this + ']'); return false; } /** * @return Keys for which locks requested from remote nodes but response isn't received. */ public synchronized Set<IgniteTxKey> requestedKeys() { if (timeoutObj != null && timeoutObj.requestedKeys != null) return timeoutObj.requestedKeys; return requestedKeys0(); } /** * @return Keys for which locks requested from remote nodes but response isn't received. */ private Set<IgniteTxKey> requestedKeys0() { for (IgniteInternalFuture<Boolean> miniFut : futures()) { if (isMini(miniFut) && !miniFut.isDone()) { MiniFuture mini = (MiniFuture)miniFut; Set<IgniteTxKey> requestedKeys = U.newHashSet(mini.keys.size()); for (KeyCacheObject key : mini.keys) requestedKeys.add(new IgniteTxKey(key, cctx.cacheId())); return requestedKeys; } } return null; } /** * Finds pending mini future by the given mini ID. * * @param miniId Mini ID to find. * @return Mini future. */ @SuppressWarnings({"ForLoopReplaceableByForEach", "IfMayBeConditional"}) private MiniFuture miniFuture(int miniId) { // We iterate directly over the futs collection here to avoid copy. synchronized (this) { int size = futuresCountNoLock(); // Avoid iterator creation. for (int i = 0; i < size; i++) { IgniteInternalFuture<Boolean> fut = future(i); if (!isMini(fut)) continue; MiniFuture mini = (MiniFuture)fut; if (mini.futureId() == miniId) { if (!mini.isDone()) return mini; else return null; } } } return null; } /** * @param t Error. 
*/ private synchronized void onError(Throwable t) { if (err == null && !(t instanceof GridCacheLockTimeoutException)) err = t; } /** * Cancellation has special meaning for lock futures. It's called then lock must be released on rollback. */ @Override public boolean cancel() { if (inTx()) onError(tx.rollbackException()); return onComplete(false, true); } /** {@inheritDoc} */ @Override public boolean onDone(Boolean success, Throwable err) { if (log.isDebugEnabled()) log.debug("Received onDone(..) callback [success=" + success + ", err=" + err + ", fut=" + this + ']'); // Local GridDhtLockFuture if (inTx() && this.err instanceof IgniteTxTimeoutCheckedException && cctx.tm().deadlockDetectionEnabled()) return false; if (isDone()) return false; if (err != null) onError(err); if (err != null) success = false; return onComplete(success, true); } /** * Completeness callback. * * @param success {@code True} if lock was acquired. * @param distribute {@code True} if need to distribute lock removal in case of failure. * @return {@code True} if complete by this operation. */ private boolean onComplete(boolean success, boolean distribute) { if (log.isDebugEnabled()) { log.debug("Received onComplete(..) callback [success=" + success + ", distribute=" + distribute + ", fut=" + this + ']'); } if (!DONE_UPD.compareAndSet(this, 0, 1)) return false; if (!success) undoLocks(distribute, true); if (tx != null) { cctx.tm().txContext(tx); if (success) tx.clearLockFuture(this); } if (super.onDone(success, err)) { if (log.isDebugEnabled()) log.debug("Completing future: " + this); // Clean up. 
cctx.mvcc().removeVersionedFuture(this); if (timeoutObj != null) cctx.time().removeTimeoutObject(timeoutObj); return true; } return false; } /** {@inheritDoc} */ @Override public int hashCode() { return futId.hashCode(); } /** {@inheritDoc} */ @Override public void addDiagnosticRequest(IgniteDiagnosticPrepareContext ctx) { if (!isDone()) { for (IgniteInternalFuture fut : futures()) { if (!fut.isDone() && isMini(fut)) { MiniFuture m = (MiniFuture)fut; AffinityTopologyVersion topVer = null; UUID rmtNodeId = null; synchronized (m) { if (!m.rcvRes && !m.node.isLocal()) { rmtNodeId = m.node.id(); topVer = this.topVer; } } if (rmtNodeId != null) { ctx.txKeyInfo(rmtNodeId, cctx.cacheId(), m.keys, "GridDhtColocatedLockFuture waiting for response [node=" + rmtNodeId + ", cache=" + cctx.name() + ", miniId=" + m.futId + ", topVer=" + topVer + ", keys=" + m.keys + ']'); return; } } } } } /** {@inheritDoc} */ @Override public String toString() { Collection<String> futs = F.viewReadOnly(futures(), new C1<IgniteInternalFuture<?>, String>() { @Override public String apply(IgniteInternalFuture<?> f) { if (isMini(f)) { MiniFuture m = (MiniFuture)f; synchronized (m) { return "[node=" + m.node().id() + ", rcvRes=" + m.rcvRes + ", loc=" + m.node().isLocal() + ", done=" + f.isDone() + "]"; } } else return "[loc=true, done=" + f.isDone() + "]"; } }); return S.toString(GridDhtColocatedLockFuture.class, this, "topVer", topVer, "innerFuts", futs, "inTx", inTx(), "super", super.toString()); } /** * @param f Future. * @return {@code True} if mini-future. */ private boolean isMini(IgniteInternalFuture<?> f) { return f.getClass().equals(MiniFuture.class); } /** * Basically, future mapping consists from two parts. First, we must determine the topology version this future * will map on. Locking is performed within a user transaction, we must continue to map keys on the same * topology version as it started. 
If topology version is undefined, we get current topology future and wait * until it completes so the topology is ready to use. * <p/> * During the second part we map keys to primary nodes using topology snapshot we obtained during the first * part. Note that if primary node leaves grid, the future will fail and transaction will be rolled back. */ void map() { if (isDone()) // Possible due to async rollback. return; if (timeout > 0) { timeoutObj = new LockTimeoutObject(); cctx.time().addTimeoutObject(timeoutObj); } // Obtain the topology version to use. AffinityTopologyVersion topVer = cctx.mvcc().lastExplicitLockTopologyVersion(threadId); // If there is another system transaction in progress, use it's topology version to prevent deadlock. if (topVer == null && tx != null && tx.system()) topVer = cctx.tm().lockedTopologyVersion(Thread.currentThread().getId(), tx); if (topVer != null && tx != null) tx.topologyVersion(topVer); if (topVer == null && tx != null) topVer = tx.topologyVersionSnapshot(); if (topVer != null) { for (GridDhtTopologyFuture fut : cctx.shared().exchange().exchangeFutures()) { if (fut.exchangeDone() && fut.topologyVersion().equals(topVer)) { Throwable err = fut.validateCache(cctx, recovery, read, null, keys); if (err != null) { onDone(err); return; } break; } } // Continue mapping on the same topology version as it was before. synchronized (this) { if (this.topVer == null) this.topVer = topVer; } map(keys, false, true); markInitialized(); return; } // Must get topology snapshot and map on that version. mapOnTopology(false, null); } /** * Acquires topology future and checks it completeness under the read lock. If it is not complete, * will asynchronously wait for it's completeness and then try again. * * @param remap Remap flag. * @param c Optional closure to run after map. */ private void mapOnTopology(final boolean remap, @Nullable final Runnable c) { // We must acquire topology snapshot from the topology version future. 
cctx.topology().readLock(); try { if (cctx.topology().stopping()) { onDone(new CacheStoppedException(cctx.name())); return; } GridDhtTopologyFuture fut = cctx.topologyVersionFuture(); if (fut.isDone()) { Throwable err = fut.validateCache(cctx, recovery, read, null, keys); if (err != null) { onDone(err); return; } AffinityTopologyVersion topVer = fut.topologyVersion(); if (remap) { if (tx != null) tx.onRemap(topVer); synchronized (this) { this.topVer = topVer; } } else { if (tx != null) tx.topologyVersion(topVer); synchronized (this) { if (this.topVer == null) this.topVer = topVer; } } map(keys, remap, false); if (c != null) c.run(); markInitialized(); } else { fut.listen(new CI1<IgniteInternalFuture<AffinityTopologyVersion>>() { @Override public void apply(IgniteInternalFuture<AffinityTopologyVersion> fut) { try { fut.get(); mapOnTopology(remap, c); } catch (IgniteCheckedException e) { onDone(e); } finally { cctx.shared().txContextReset(); } } }); } } finally { cctx.topology().readUnlock(); } } /** * Maps keys to nodes. Note that we can not simply group keys by nodes and send lock request as * such approach does not preserve order of lock acquisition. Instead, keys are split in continuous * groups belonging to one primary node and locks for these groups are acquired sequentially. * * @param keys Keys. * @param remap Remap flag. * @param topLocked {@code True} if thread already acquired lock preventing topology change. */ private void map(Collection<KeyCacheObject> keys, boolean remap, boolean topLocked) { try { map0( keys, remap, topLocked); } catch (IgniteCheckedException ex) { onDone(false, ex); } } /** * @param keys Keys to map. * @param remap Remap flag. * @param topLocked Topology locked flag. * @throws IgniteCheckedException If mapping failed. 
*/ private synchronized void map0( Collection<KeyCacheObject> keys, boolean remap, boolean topLocked ) throws IgniteCheckedException { AffinityTopologyVersion topVer = this.topVer; assert topVer != null; assert topVer.topologyVersion() > 0; if (CU.affinityNodes(cctx, topVer).isEmpty()) { onDone(new ClusterTopologyServerNotFoundException("Failed to map keys for cache " + "(all partition nodes left the grid): " + cctx.name())); return; } boolean clientNode = cctx.kernalContext().clientNode(); assert !remap || (clientNode && (tx == null || !tx.hasRemoteLocks())); // First assume this node is primary for all keys passed in. if (!clientNode && mapAsPrimary(keys, topVer)) return; mappings = new ArrayDeque<>(); // Assign keys to primary nodes. GridNearLockMapping map = null; for (KeyCacheObject key : keys) { GridNearLockMapping updated = map(key, map, topVer); // If new mapping was created, add to collection. if (updated != map) { mappings.add(updated); if (tx != null && updated.node().isLocal()) tx.colocatedLocallyMapped(true); } map = updated; } if (isDone()) { if (log.isDebugEnabled()) log.debug("Abandoning (re)map because future is done: " + this); return; } if (log.isDebugEnabled()) log.debug("Starting (re)map for mappings [mappings=" + mappings + ", fut=" + this + ']'); boolean hasRmtNodes = false; boolean first = true; // Create mini futures. 
for (Iterator<GridNearLockMapping> iter = mappings.iterator(); iter.hasNext(); ) { GridNearLockMapping mapping = iter.next(); ClusterNode node = mapping.node(); Collection<KeyCacheObject> mappedKeys = mapping.mappedKeys(); boolean loc = node.equals(cctx.localNode()); assert !mappedKeys.isEmpty(); GridNearLockRequest req = null; Collection<KeyCacheObject> distributedKeys = new ArrayList<>(mappedKeys.size()); for (KeyCacheObject key : mappedKeys) { IgniteTxKey txKey = cctx.txKey(key); GridDistributedCacheEntry entry = null; if (tx != null) { IgniteTxEntry txEntry = tx.entry(txKey); if (txEntry != null) { entry = (GridDistributedCacheEntry)txEntry.cached(); if (entry != null && loc == entry.detached()) { entry = cctx.colocated().entryExx(key, topVer, true); txEntry.cached(entry); } } } boolean explicit; while (true) { try { if (entry == null) entry = cctx.colocated().entryExx(key, topVer, true); if (!cctx.isAll(entry, filter)) { if (log.isDebugEnabled()) log.debug("Entry being locked did not pass filter (will not lock): " + entry); onComplete(false, false); return; } assert loc ^ entry.detached() : "Invalid entry [loc=" + loc + ", entry=" + entry + ']'; GridCacheMvccCandidate cand = addEntry(entry); // Will either return value from dht cache or null if this is a miss. IgniteBiTuple<GridCacheVersion, CacheObject> val = entry.detached() ? null : ((GridDhtCacheEntry)entry).versionedValue(topVer); GridCacheVersion dhtVer = null; if (val != null) { dhtVer = val.get1(); valMap.put(key, val); } if (cand != null && !cand.reentry()) { if (req == null) { boolean clientFirst = false; if (first) { clientFirst = clientNode && !topLocked && (tx == null || !tx.hasRemoteLocks()); first = false; } assert !implicitTx() && !implicitSingleTx() : tx; req = new GridNearLockRequest( cctx.cacheId(), topVer, cctx.nodeId(), threadId, futId, lockVer, inTx(), read, retval, isolation(), isInvalidate(), timeout, mappedKeys.size(), inTx() ? 
tx.size() : mappedKeys.size(), inTx() && tx.syncMode() == FULL_SYNC, inTx() ? tx.subjectId() : null, inTx() ? tx.taskNameHash() : 0, read ? createTtl : -1L, read ? accessTtl : -1L, skipStore, keepBinary, clientFirst, false, cctx.deploymentEnabled()); mapping.request(req); } distributedKeys.add(key); if (tx != null) tx.addKeyMapping(txKey, mapping.node()); req.addKeyBytes( key, retval, dhtVer, // Include DHT version to match remote DHT entry. cctx); } explicit = inTx() && cand == null; if (explicit) tx.addKeyMapping(txKey, mapping.node()); break; } catch (GridCacheEntryRemovedException ignored) { if (log.isDebugEnabled()) log.debug("Got removed entry in lockAsync(..) method (will retry): " + entry); entry = null; } } // Mark mapping explicit lock flag. if (explicit) { boolean marked = tx != null && tx.markExplicit(node.id()); assert tx == null || marked; } } if (!distributedKeys.isEmpty()) { mapping.distributedKeys(distributedKeys); hasRmtNodes |= !mapping.node().isLocal(); } else { assert mapping.request() == null; iter.remove(); } } if (hasRmtNodes) { trackable = true; if (!remap && !cctx.mvcc().addFuture(this)) throw new IllegalStateException("Duplicate future ID: " + this); } else trackable = false; proceedMapping(); } /** * @throws IgniteCheckedException If failed. */ private void proceedMapping() throws IgniteCheckedException { boolean set = tx != null && cctx.shared().tm().setTxTopologyHint(tx.topologyVersionSnapshot()); try { proceedMapping0(); } finally { if (set) cctx.tm().setTxTopologyHint(null); } } /** * Gets next near lock mapping and either acquires dht locks locally or sends near lock request to * remote primary node. * * @throws IgniteCheckedException If mapping can not be completed. */ private void proceedMapping0() throws IgniteCheckedException { GridNearLockMapping map; synchronized (this) { map = mappings.poll(); } // If there are no more mappings to process, complete the future. 
if (map == null) return; final GridNearLockRequest req = map.request(); final Collection<KeyCacheObject> mappedKeys = map.distributedKeys(); final ClusterNode node = map.node(); if (filter != null && filter.length != 0) req.filter(filter, cctx); if (node.isLocal()) lockLocally(mappedKeys, req.topologyVersion()); else { final MiniFuture fut = new MiniFuture(node, mappedKeys, ++miniId); req.miniId(fut.futureId()); add(fut); // Append new future. IgniteInternalFuture<?> txSync = null; if (inTx()) txSync = cctx.tm().awaitFinishAckAsync(node.id(), tx.threadId()); if (txSync == null || txSync.isDone()) { try { cctx.io().send(node, req, cctx.ioPolicy()); if (msgLog.isDebugEnabled()) { msgLog.debug("Collocated lock fut, sent request [txId=" + lockVer + ", inTx=" + inTx() + ", node=" + node.id() + ']'); } } catch (ClusterTopologyCheckedException ex) { assert fut != null; fut.onResult(ex); } } else { txSync.listen(new CI1<IgniteInternalFuture<?>>() { @Override public void apply(IgniteInternalFuture<?> t) { try { cctx.io().send(node, req, cctx.ioPolicy()); if (msgLog.isDebugEnabled()) { msgLog.debug("Collocated lock fut, sent request [txId=" + lockVer + ", inTx=" + inTx() + ", node=" + node.id() + ']'); } } catch (ClusterTopologyCheckedException ex) { assert fut != null; fut.onResult(ex); } catch (IgniteCheckedException e) { if (msgLog.isDebugEnabled()) { msgLog.debug("Collocated lock fut, failed to send request [txId=" + lockVer + ", inTx=" + inTx() + ", node=" + node.id() + ", err=" + e + ']'); } onError(e); } } }); } } } /** * Locks given keys directly through dht cache. * @param keys Collection of keys. * @param topVer Topology version to lock on. 
*/ private void lockLocally( final Collection<KeyCacheObject> keys, AffinityTopologyVersion topVer ) { if (log.isDebugEnabled()) log.debug("Before locally locking keys : " + keys); IgniteInternalFuture<Exception> fut = cctx.colocated().lockAllAsync(cctx, tx, threadId, lockVer, topVer, keys, read, retval, timeout, createTtl, accessTtl, filter, skipStore, keepBinary); // Add new future. add(new GridEmbeddedFuture<>( new C2<Exception, Exception, Boolean>() { @Override public Boolean apply(Exception resEx, Exception e) { if (CU.isLockTimeoutOrCancelled(e) || (resEx != null && CU.isLockTimeoutOrCancelled(resEx))) return false; if (e != null) { onError(e); return false; } if (resEx != null) { onError(resEx); return false; } if (log.isDebugEnabled()) log.debug("Acquired lock for local DHT mapping [locId=" + cctx.nodeId() + ", mappedKeys=" + keys + ", fut=" + GridDhtColocatedLockFuture.this + ']'); if (inTx()) { for (KeyCacheObject key : keys) tx.entry(cctx.txKey(key)).markLocked(); } else { for (KeyCacheObject key : keys) cctx.mvcc().markExplicitOwner(cctx.txKey(key), threadId); } try { // Proceed and add new future (if any) before completing embedded future. if (mappings != null) proceedMapping(); } catch (IgniteCheckedException ex) { onError(ex); return false; } return true; } }, fut)); } /** * Tries to map this future in assumption that local node is primary for all keys passed in. * If node is not primary for one of the keys, then mapping is reverted and full remote mapping is performed. * * @param keys Keys to lock. * @param topVer Topology version. * @return {@code True} if all keys were mapped locally, {@code false} if full mapping should be performed. * @throws IgniteCheckedException If key cannot be added to mapping. */ private boolean mapAsPrimary(Collection<KeyCacheObject> keys, AffinityTopologyVersion topVer) throws IgniteCheckedException { // Assign keys to primary nodes. 
Collection<KeyCacheObject> distributedKeys = new ArrayList<>(keys.size()); boolean explicit = false; for (KeyCacheObject key : keys) { if (!cctx.affinity().primaryByKey(cctx.localNode(), key, topVer)) { // Remove explicit locks added so far. for (KeyCacheObject k : keys) cctx.mvcc().removeExplicitLock(threadId, cctx.txKey(k), lockVer); return false; } explicit |= addLocalKey(key, topVer, distributedKeys); if (isDone()) return true; } trackable = false; if (tx != null) { if (explicit) tx.markExplicit(cctx.localNodeId()); tx.colocatedLocallyMapped(true); } if (!distributedKeys.isEmpty()) { if (tx != null) { for (KeyCacheObject key : distributedKeys) tx.addKeyMapping(cctx.txKey(key), cctx.localNode()); } lockLocally(distributedKeys, topVer); } return true; } /** * Adds local key future. * * @param key Key to add. * @param topVer Topology version. * @param distributedKeys Collection of keys needs to be locked. * @return {@code True} if transaction accesses key that was explicitly locked before. * @throws IgniteCheckedException If lock is externally held and transaction is explicit. */ private boolean addLocalKey( KeyCacheObject key, AffinityTopologyVersion topVer, Collection<KeyCacheObject> distributedKeys ) throws IgniteCheckedException { GridDistributedCacheEntry entry = cctx.colocated().entryExx(key, topVer, false); assert !entry.detached(); if (!cctx.isAll(entry, filter)) { if (log.isDebugEnabled()) log.debug("Entry being locked did not pass filter (will not lock): " + entry); onComplete(false, false); return false; } GridCacheMvccCandidate cand = addEntry(entry); if (cand != null && !cand.reentry()) distributedKeys.add(key); return inTx() && cand == null; } /** * @param mapping Mappings. * @param key Key to map. * @param topVer Topology version. * @return Near lock mapping. * @throws IgniteCheckedException If mapping failed. 
     */
    private GridNearLockMapping map(
        KeyCacheObject key,
        @Nullable GridNearLockMapping mapping,
        AffinityTopologyVersion topVer
    ) throws IgniteCheckedException {
        assert mapping == null || mapping.node() != null;

        // Resolve the primary node for this key on the given topology version.
        ClusterNode primary = cctx.affinity().primaryByKey(key, topVer);

        if (primary == null)
            throw new ClusterTopologyServerNotFoundException("Failed to lock keys " +
                "(all partition nodes left the grid).");

        if (cctx.discovery().node(primary.id()) == null)
            // If primary node left the grid before lock acquisition, fail the whole future.
            throw newTopologyException(null, primary.id());

        // Reuse the current mapping while consecutive keys map to the same primary;
        // start a new mapping when the primary changes.
        if (mapping == null || !primary.id().equals(mapping.node().id()))
            mapping = new GridNearLockMapping(primary, key);
        else
            mapping.addKey(key);

        return mapping;
    }

    /**
     * Creates new topology exception for cases when primary node leaves grid during mapping.
     *
     * @param nested Optional nested exception.
     * @param nodeId Node ID.
     * @return Topology exception with user-friendly message.
     */
    private ClusterTopologyCheckedException newTopologyException(@Nullable Throwable nested, UUID nodeId) {
        ClusterTopologyCheckedException topEx = new ClusterTopologyCheckedException("Failed to acquire lock for keys " +
            "(primary node left grid, retry transaction if possible) [keys=" + keys + ", node=" + nodeId + ']', nested);

        // Attach a future callers can wait on before retrying on the next affinity topology.
        topEx.retryReadyFuture(cctx.shared().nextAffinityReadyFuture(topVer));

        return topEx;
    }

    /**
     * Lock request timeout object. Fires when the lock was not acquired within the
     * configured timeout and completes the enclosing future with {@code false}.
     */
    private class LockTimeoutObject extends GridTimeoutObjectAdapter {
        /**
         * Default constructor.
         */
        LockTimeoutObject() {
            super(timeout);
        }

        /** Requested keys. */
        private Set<IgniteTxKey> requestedKeys;

        /** {@inheritDoc} */
        @Override public void onTimeout() {
            if (log.isDebugEnabled())
                log.debug("Timed out waiting for lock response: " + this);

            if (inTx()) {
                if (cctx.tm().deadlockDetectionEnabled()) {
                    // Snapshot the requested keys and stop further response processing
                    // under the outer future's lock before starting deadlock detection.
                    synchronized (GridDhtColocatedLockFuture.this) {
                        requestedKeys = requestedKeys0();

                        clear(); // Stop response processing.
                    }

                    // Collect keys that are still not locked by this transaction.
                    Set<IgniteTxKey> keys = new HashSet<>();

                    for (IgniteTxEntry txEntry : tx.allEntries()) {
                        if (!txEntry.locked())
                            keys.add(txEntry.txKey());
                    }

                    IgniteInternalFuture<TxDeadlock> fut = cctx.tm().detectDeadlock(tx, keys);

                    fut.listen(new IgniteInClosure<IgniteInternalFuture<TxDeadlock>>() {
                        @Override public void apply(IgniteInternalFuture<TxDeadlock> fut) {
                            try {
                                TxDeadlock deadlock = fut.get();

                                // Report the timeout; if a deadlock was found, attach it as the cause.
                                err = new IgniteTxTimeoutCheckedException("Failed to acquire lock within provided " +
                                    "timeout for transaction [timeout=" + tx.timeout() + ", tx=" + CU.txString(tx) + ']',
                                    deadlock != null ? new TransactionDeadlockException(deadlock.toString(cctx.shared())) :
                                        null);
                            }
                            catch (IgniteCheckedException e) {
                                err = e;

                                U.warn(log, "Failed to detect deadlock.", e);
                            }

                            // Complete the future only after deadlock detection finished,
                            // so the reported error includes the deadlock info if any.
                            synchronized (LockTimeoutObject.this) {
                                onComplete(false, true);
                            }
                        }
                    });
                }
                else
                    err = tx.timeoutException();
            }
            else {
                // Non-transactional lock: just complete the future as not acquired.
                synchronized (this) {
                    onComplete(false, true);
                }
            }
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(LockTimeoutObject.class, this);
        }
    }

    /**
     * Mini-future for get operations. Mini-futures are only waiting on a single
     * node as opposed to multiple nodes.
     */
    private class MiniFuture extends GridFutureAdapter<Boolean> {
        /** Mini future ID (unique within the enclosing future). */
        private final int futId;

        /** Node ID. */
        @GridToStringExclude
        private final ClusterNode node;

        /** Keys. */
        @GridToStringInclude
        private final Collection<KeyCacheObject> keys;

        /** {@code True} once a result (response or node-left) has been received; guarded by {@code this}. */
        private boolean rcvRes;

        /** Remap topology version for debug purpose. */
        private AffinityTopologyVersion remapTopVer;

        /**
         * @param node Node.
         * @param keys Keys.
         * @param futId Mini future ID.
         */
        MiniFuture(
            ClusterNode node,
            Collection<KeyCacheObject> keys,
            int futId
        ) {
            this.node = node;
            this.keys = keys;
            this.futId = futId;
        }

        /**
         * @return Future ID.
         */
        int futureId() {
            return futId;
        }

        /**
         * @return Node ID.
         */
        public ClusterNode node() {
            return node;
        }

        /**
         * @return Keys.
         */
        public Collection<KeyCacheObject> keys() {
            return keys;
        }

        /**
         * Called when the target node left the grid before responding.
         *
         * @param e Node left exception.
         */
        void onResult(ClusterTopologyCheckedException e) {
            if (msgLog.isDebugEnabled()) {
                msgLog.debug("Collocated lock fut, mini future node left [txId=" + lockVer +
                    ", inTx=" + inTx() +
                    ", nodeId=" + node.id() + ']');
            }

            if (isDone())
                return;

            // Accept only the first result for this mini future.
            synchronized (this) {
                if (rcvRes)
                    return;

                rcvRes = true;
            }

            if (tx != null)
                tx.removeMapping(node.id());

            // Primary node left the grid, so fail the future.
            GridDhtColocatedLockFuture.this.onDone(false, newTopologyException(e, node.id()));

            // Complete this mini future itself so the compound future can finish.
            onDone(true);
        }

        /**
         * Processes the lock response from the remote node.
         *
         * @param res Result callback.
         */
        void onResult(GridNearLockResponse res) {
            // Accept only the first result for this mini future.
            synchronized (this) {
                if (rcvRes)
                    return;

                rcvRes = true;

                remapTopVer = res.clientRemapVersion();
            }

            if (res.error() != null) {
                // On timeout with deadlock detection enabled the timeout object
                // (not this response) is responsible for completing the future.
                if (inTx() && res.error() instanceof IgniteTxTimeoutCheckedException &&
                    cctx.tm().deadlockDetectionEnabled())
                    return;

                if (log.isDebugEnabled())
                    log.debug("Finishing mini future with an error due to error in response [miniFut=" + this +
                        ", res=" + res + ']');

                // Fail.
                if (res.error() instanceof GridCacheLockTimeoutException)
                    onDone(false);
                else
                    onDone(res.error());

                return;
            }

            if (res.clientRemapVersion() != null) {
                // Server asked the client to remap on a newer topology version.
                assert cctx.kernalContext().clientNode();

                IgniteInternalFuture<?> affFut =
                    cctx.shared().exchange().affinityReadyFuture(res.clientRemapVersion());

                if (affFut != null && !affFut.isDone()) {
                    affFut.listen(new CI1<IgniteInternalFuture<?>>() {
                        @Override public void apply(IgniteInternalFuture<?> fut) {
                            try {
                                fut.get();

                                remap();
                            }
                            catch (IgniteCheckedException e) {
                                onDone(e);
                            }
                            finally {
                                cctx.shared().txContextReset();
                            }
                        }
                    });
                }
                else
                    remap();
            }
            else {
                // Response values are positionally aligned with the requested keys.
                int i = 0;

                for (KeyCacheObject k : keys) {
                    IgniteBiTuple<GridCacheVersion, CacheObject> oldValTup = valMap.get(k);

                    CacheObject newVal = res.value(i);

                    GridCacheVersion dhtVer = res.dhtVersion(i);

                    // Fall back to the previously known value if the DHT version did not change.
                    if (newVal == null) {
                        if (oldValTup != null) {
                            if (oldValTup.get1().equals(dhtVer))
                                newVal = oldValTup.get2();
                        }
                    }

                    if (inTx()) {
                        IgniteTxEntry txEntry = tx.entry(cctx.txKey(k));

                        // In colocated cache we must receive responses only for
                        // detached entries.
                        assert txEntry.cached().detached() : txEntry;

                        txEntry.markLocked();

                        GridDhtDetachedCacheEntry entry = (GridDhtDetachedCacheEntry)txEntry.cached();

                        if (res.dhtVersion(i) == null) {
                            onDone(new IgniteCheckedException("Failed to receive DHT version from remote node " +
                                "(will fail the lock): " + res));

                            return;
                        }

                        // Set value to detached entry.
                        entry.resetFromPrimary(newVal, dhtVer);

                        tx.hasRemoteLocks(true);

                        if (log.isDebugEnabled())
                            log.debug("Processed response for entry [res=" + res + ", entry=" + entry + ']');
                    }
                    else
                        cctx.mvcc().markExplicitOwner(cctx.txKey(k), threadId);

                    if (retval && cctx.events().isRecordable(EVT_CACHE_OBJECT_READ)) {
                        cctx.events().addEvent(cctx.affinity().partition(k), k, tx, null,
                            EVT_CACHE_OBJECT_READ, newVal, newVal != null, null, false,
                            CU.subjectId(tx, cctx.shared()), null,
                            tx == null ? null : tx.resolveTaskName(),
                            keepBinary);
                    }

                    i++;
                }

                // Continue mapping the remaining (not yet requested) keys.
                try {
                    proceedMapping();
                }
                catch (IgniteCheckedException e) {
                    onDone(e);
                }

                onDone(true);
            }
        }

        /**
         * Undoes acquired locks and restarts mapping on the new topology
         * (client remap path).
         */
        private void remap() {
            undoLocks(false, false);

            for (KeyCacheObject key : GridDhtColocatedLockFuture.this.keys)
                cctx.mvcc().removeExplicitLock(threadId, cctx.txKey(key), lockVer);

            mapOnTopology(true, new Runnable() {
                @Override public void run() {
                    onDone(true);
                }
            });
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(MiniFuture.class, this,
                "node", node.id(),
                "super", super.toString());
        }
    }
}
package com.ubudu.iot.sample.fragment; import android.os.Bundle; import android.support.annotation.Nullable; import android.text.Editable; import android.text.TextWatcher; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.EditText; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.ubudu.iot.sample.R; import com.ubudu.iot.sample.util.ToastUtil; import com.ubudu.iot.util.LongDataProtocolV3; import butterknife.BindView; import butterknife.ButterKnife; /** * Created by mgasztold on 08/09/2017. */ public class CommFragment extends BaseFragment { public static final String TAG = CommFragment.class.getCanonicalName(); @BindView(R.id.send_message_edit_text) EditText messageEditText; @BindView(R.id.response_text_view) TextView responseTextView; @BindView(R.id.send_data_button) Button sendDataButton; @BindView(R.id.disconnect_button) Button disconnectButton; @BindView(R.id.loopback_mode) CheckBox loopBackModeCheckBox; @BindView(R.id.set_bytes_count_mode) CheckBox ascendingBytesModeCheckBox; @BindView(R.id.stream) CheckBox streamCheckBox; @BindView(R.id.stream_delay_edit_text) EditText streamDelayEditText; @BindView(R.id.stream_iterations_edit_text) EditText streamIterationsEditText; @BindView(R.id.iterations_text_view) TextView iterationsTextView; @BindView(R.id.iteration_delay_text_view) TextView iterationDelayTextView; @BindView(R.id.message_text_view) TextView messageTitleTextView; @BindView(R.id.bytes_count_edit_text) EditText bytesCountEditText; @BindView(R.id.bytes_count_text_view) TextView bytesCountTextView; @BindView(R.id.communication_layout) LinearLayout communicationLayout; @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { RelativeLayout mRootView 
= (RelativeLayout) inflater.inflate(R.layout.fragment_comm, container, false); ButterKnife.bind(this, mRootView); return mRootView; } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); configureUI(); } @Override public void onPause() { getViewController().onCommFragmentPaused(); super.onPause(); } @Override public void onResume() { super.onResume(); getViewController().onCommFragmentResumed(); } private void configureUI() { disconnectButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { disconnectButton.setEnabled(false); getViewController().onDisconnectRequested(); } }); loopBackModeCheckBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if(isChecked){ ascendingBytesModeCheckBox.setChecked(false); streamCheckBox.setChecked(false); ascendingBytesModeCheckBox.setChecked(false); ascendingBytesModeCheckBox.setEnabled(false); streamCheckBox.setEnabled(false); messageEditText.setText(""); messageEditText.setEnabled(false); sendDataButton.setVisibility(View.GONE); } else { messageEditText.setEnabled(true); sendDataButton.setVisibility(View.VISIBLE); ascendingBytesModeCheckBox.setEnabled(true); streamCheckBox.setEnabled(true); } } }); ascendingBytesModeCheckBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if(isChecked){ bytesCountTextView.setVisibility(View.VISIBLE); bytesCountEditText.setVisibility(View.VISIBLE); messageEditText.setEnabled(false); messageEditText.setText(getAscendingBytesDataString(Integer.parseInt(bytesCountEditText.getText().toString()))); loopBackModeCheckBox.setEnabled(false); bytesCountEditText.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int 
start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { try { messageEditText.setText(getAscendingBytesDataString(Integer.parseInt(bytesCountEditText.getText().toString()))); } catch(Exception e) { messageEditText.setText(getAscendingBytesDataString(1)); e.printStackTrace(); } } @Override public void afterTextChanged(Editable s) { } }); } else { if(!streamCheckBox.isChecked()) { loopBackModeCheckBox.setEnabled(true); } bytesCountTextView.setVisibility(View.GONE); bytesCountEditText.setVisibility(View.GONE); messageEditText.setEnabled(true); messageTitleTextView.setText(getResources().getString(R.string.string_message)); messageEditText.setText(""); } } }); streamCheckBox.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if(isChecked) { loopBackModeCheckBox.setEnabled(false); streamDelayEditText.setVisibility(View.VISIBLE); streamIterationsEditText.setVisibility(View.VISIBLE); iterationsTextView.setVisibility(View.VISIBLE); iterationDelayTextView.setVisibility(View.VISIBLE); } else { if(!ascendingBytesModeCheckBox.isChecked()) { loopBackModeCheckBox.setEnabled(true); } streamDelayEditText.setVisibility(View.GONE); streamIterationsEditText.setVisibility(View.GONE); iterationsTextView.setVisibility(View.GONE); iterationDelayTextView.setVisibility(View.GONE); } } }); sendDataButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { requestSend(); } }); } public void requestSend() { String output = messageEditText.getText().toString(); if(output.isEmpty()) { ToastUtil.showToast(getContext(),"message is empty"); return; } if(streamCheckBox.isChecked()) { final int iterations = Integer.parseInt(streamIterationsEditText.getText().toString()); if(iterations > 0) { sendDataButton.setEnabled(false); final String message = output; final int delay = 
Integer.parseInt(streamDelayEditText.getText().toString()); new Thread(new Runnable() { @Override public void run() { for (int i = 0; i < iterations; i++) { getViewController().onSendMessageRequested(message.getBytes(), LongDataProtocolV3.DATA_TYPE_STRING); try { Thread.sleep(delay); } catch (InterruptedException e) { e.printStackTrace(); } } } }).start(); } } else { getViewController().onSendMessageRequested(output.getBytes(), LongDataProtocolV3.DATA_TYPE_STRING); } } private String getAscendingBytesDataString(int count) { StringBuilder output = new StringBuilder(); int startIndex = 48; int endIndex = 126; int a = 0; for(int i=0; i<count; i++) { int asciiIndex = startIndex+i-(endIndex-startIndex+1)*a; if(asciiIndex == endIndex+1){ a++; asciiIndex = startIndex+i-(endIndex-startIndex+1)*a; } output.append(Character.toString((char)asciiIndex)); } return output.toString(); } public void onDataReceived(String msg) { responseTextView.setText(msg); if (loopBackModeCheckBox.isChecked()) { messageEditText.setText(msg); sendDataButton.callOnClick(); } } public void onDataSent(final String sentData) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { if(streamCheckBox.isChecked()) { streamIterationsEditText.setText(String.valueOf(Integer.parseInt(streamIterationsEditText.getText().toString())-1)); if(Integer.parseInt(streamIterationsEditText.getText().toString())==0){ sendDataButton.setEnabled(true); } } } }); if(sentData==null) ToastUtil.showToast(getContext(),"Data sending error"); else { try { getActivity().runOnUiThread(new Runnable() { @Override public void run() { messageEditText.setText(sentData); } }); } catch (Exception e) { e.printStackTrace(); } } } }
package org.editorconfig.configmanagement;

import com.intellij.application.options.CodeStyle;
import com.intellij.ide.actions.ShowSettingsUtilImpl;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationDisplayType;
import com.intellij.notification.NotificationGroup;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings.IndentOptions;
import com.intellij.psi.codeStyle.FileIndentOptionsProvider;
import com.intellij.util.containers.ContainerUtil;
import org.editorconfig.Utils;
import org.editorconfig.core.EditorConfig;
import org.editorconfig.language.messages.EditorConfigBundle;
import org.editorconfig.plugincomponents.SettingsProviderComponent;
import org.editorconfig.settings.EditorConfigSettings;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.List;

/**
 * Maps EditorConfig indentation properties onto IDE {@link IndentOptions} for a file.
 *
 * @author Dennis.Ushakov
 */
public class EditorConfigIndentOptionsProvider extends FileIndentOptionsProvider {
  // Handles the following EditorConfig settings:
  public static final String indentSizeKey = "indent_size";
  public static final String continuationSizeKey = "continuation_indent_size";
  public static final String tabWidthKey = "tab_width";
  public static final String indentStyleKey = "indent_style";

  /** Per-project flag: the advertisement hint was already shown once. */
  private static final String PROJECT_ADVERTISEMENT_FLAG = "editor.config.ad.shown";

  private static final NotificationGroup NOTIFICATION_GROUP =
    new NotificationGroup("EditorConfig", NotificationDisplayType.STICKY_BALLOON, true);

  /**
   * Computes indent options for {@code psiFile} from its EditorConfig settings.
   *
   * @return EditorConfig-derived options, or {@code null} when EditorConfig is
   *         disabled, the file is not on disk, or no relevant property applies.
   */
  @Nullable
  @Override
  public IndentOptions getIndentOptions(@NotNull CodeStyleSettings settings, @NotNull PsiFile psiFile) {
    final VirtualFile file = psiFile.getVirtualFile();
    if (file == null) return null;
    final Project project = psiFile.getProject();
    if (project.isDisposed() || !Utils.isEnabled(settings)) return null;

    // Get editorconfig settings
    final List<EditorConfig.OutPair> outPairs = SettingsProviderComponent.getInstance().getOutPairs(project, file);
    // Apply editorconfig settings for the current editor
    return applyCodeStyleSettings(project, outPairs, file, settings);
  }

  /**
   * Clones the file-type indent options and overlays EditorConfig values.
   *
   * @return the overridden options, or {@code null} if nothing changed.
   */
  private static IndentOptions applyCodeStyleSettings(Project project,
                                                      final List<EditorConfig.OutPair> outPairs,
                                                      final VirtualFile file,
                                                      final CodeStyleSettings settings) {
    // Apply indent options
    final String indentSize = Utils.configValueForKey(outPairs, indentSizeKey);
    final String continuationIndentSize = Utils.configValueForKey(outPairs, continuationSizeKey);
    final String tabWidth = Utils.configValueForKey(outPairs, tabWidthKey);
    final String indentStyle = Utils.configValueForKey(outPairs, indentStyleKey);
    final IndentOptions indentOptions = (IndentOptions)settings.getIndentOptions(file.getFileType()).clone();
    if (applyIndentOptions(project, indentOptions, indentSize, continuationIndentSize, tabWidth, indentStyle,
                           file.getCanonicalPath())) {
      indentOptions.setOverrideLanguageOptions(true);
      return indentOptions;
    }
    return null;
  }

  /**
   * Applies each recognized EditorConfig value to {@code indentOptions},
   * reporting a per-key warning when a value cannot be parsed.
   *
   * @return {@code true} if at least one option was changed.
   */
  private static boolean applyIndentOptions(Project project, IndentOptions indentOptions,
                                            String indentSize, String continuationIndentSize,
                                            String tabWidth, String indentStyle,
                                            String filePath) {
    boolean changed = false;
    final String calculatedIndentSize = calculateIndentSize(tabWidth, indentSize);
    final String calculatedContinuationSize = calculateContinuationIndentSize(calculatedIndentSize, continuationIndentSize);
    final String calculatedTabWidth = calculateTabWidth(tabWidth, indentSize);
    if (!calculatedIndentSize.isEmpty()) {
      if (applyIndentSize(indentOptions, calculatedIndentSize)) {
        changed = true;
      }
      else {
        Utils.invalidConfigMessage(project, calculatedIndentSize, indentSizeKey, filePath);
      }
    }
    if (!calculatedContinuationSize.isEmpty()) {
      if (applyContinuationIndentSize(indentOptions, calculatedContinuationSize)) {
        changed = true;
      }
      else {
        // Fixed copy-paste bug: the failure message previously reported the
        // indent-size value/key instead of the continuation-indent value/key.
        Utils.invalidConfigMessage(project, calculatedContinuationSize, continuationSizeKey, filePath);
      }
    }
    if (!calculatedTabWidth.isEmpty()) {
      if (applyTabWidth(indentOptions, calculatedTabWidth)) {
        changed = true;
      }
      else {
        Utils.invalidConfigMessage(project, calculatedTabWidth, tabWidthKey, filePath);
      }
    }
    if (!indentStyle.isEmpty()) {
      if (applyIndentStyle(indentOptions, indentStyle)) {
        changed = true;
      }
      else {
        Utils.invalidConfigMessage(project, indentStyle, indentStyleKey, filePath);
      }
    }
    return changed;
  }

  /** Per spec, {@code indent_size = tab} means "use the tab width". */
  private static String calculateIndentSize(final String tabWidth, final String indentSize) {
    return indentSize.equals("tab") ? tabWidth : indentSize;
  }

  /** Continuation indent defaults to the (calculated) indent size when unset. */
  private static String calculateContinuationIndentSize(final String indentSize, final String continuationIndentSize) {
    return continuationIndentSize.isEmpty() ? indentSize : continuationIndentSize;
  }

  /** Tab width falls back to {@code indent_size} unless that itself is "tab". */
  private static String calculateTabWidth(final String tabWidth, final String indentSize) {
    if (tabWidth.isEmpty() && indentSize.equals("tab")) {
      return "";
    }
    else if (tabWidth.isEmpty()) {
      return indentSize;
    }
    else {
      return tabWidth;
    }
  }

  /** @return {@code true} if {@code indentSize} parsed as an int and was applied. */
  private static boolean applyIndentSize(final IndentOptions indentOptions, final String indentSize) {
    try {
      indentOptions.INDENT_SIZE = Integer.parseInt(indentSize);
      return true;
    }
    catch (NumberFormatException e) {
      return false;
    }
  }

  /** @return {@code true} if {@code continuationIndentSize} parsed as an int and was applied. */
  private static boolean applyContinuationIndentSize(final IndentOptions indentOptions, final String continuationIndentSize) {
    try {
      indentOptions.CONTINUATION_INDENT_SIZE = Integer.parseInt(continuationIndentSize);
      return true;
    }
    catch (NumberFormatException e) {
      return false;
    }
  }

  /** @return {@code true} if {@code tabWidth} parsed as an int and was applied. */
  private static boolean applyTabWidth(final IndentOptions indentOptions, final String tabWidth) {
    try {
      indentOptions.TAB_SIZE = Integer.parseInt(tabWidth);
      return true;
    }
    catch (NumberFormatException e) {
      return false;
    }
  }

  /** @return {@code true} if {@code indentStyle} is a valid value ("tab"/"space") and was applied. */
  private static boolean applyIndentStyle(IndentOptions indentOptions, String indentStyle) {
    if (indentStyle.equals("tab") || indentStyle.equals("space")) {
      indentOptions.USE_TAB_CHARACTER = indentStyle.equals("tab");
      return true;
    }
    return false;
  }

  @Override
  public boolean areActionsAvailable(@NotNull VirtualFile file, @NotNull IndentOptions indentOptions) {
    return isEditorConfigOptions(indentOptions);
  }

  @Nullable
  @Override
  public AnAction[] getActions(@NotNull PsiFile file, @NotNull IndentOptions indentOptions) {
    if (isEditorConfigOptions(indentOptions)) {
      List<AnAction> actions = ContainerUtil.newArrayList();
      actions.addAll(EditorConfigNavigationActionsFactory.getNavigationActions(file));
      return actions.toArray(AnAction.EMPTY_ARRAY);
    }
    return null;
  }

  @Nullable
  @Override
  public AnAction createDisableAction(@NotNull Project project) {
    return DumbAwareAction.create(
      EditorConfigBundle.message("action.disable"),
      e -> {
        EditorConfigSettings settings = CodeStyle.getSettings(project).getCustomSettings(EditorConfigSettings.class);
        settings.ENABLED = false;
        notifyIndentOptionsChanged(project, null);
        showDisabledDetectionNotification(project);
      });
  }

  @Nullable
  @Override
  public String getHint(@NotNull IndentOptions indentOptions) {
    return isEditorConfigOptions(indentOptions) ? "EditorConfig" : null;
  }

  /** @return {@code true} if the options came from this provider. */
  private static boolean isEditorConfigOptions(@NotNull IndentOptions indentOptions) {
    return indentOptions.getFileIndentOptionsProvider() instanceof EditorConfigIndentOptionsProvider;
  }

  /** Shows the advertisement text once per project, then sets the shown flag. */
  @Nullable
  @Override
  public String getAdvertisementText(@NotNull PsiFile psiFile, @NotNull IndentOptions indentOptions) {
    final PropertiesComponent projectProperties = PropertiesComponent.getInstance(psiFile.getProject());
    boolean adFlag = projectProperties.getBoolean(PROJECT_ADVERTISEMENT_FLAG);
    if (adFlag) return null;
    projectProperties.setValue(PROJECT_ADVERTISEMENT_FLAG, true);
    return EditorConfigBundle.message("advertisement.text");
  }

  private static void showDisabledDetectionNotification(@NotNull Project project) {
    EditorConfigDisabledNotification notification = new EditorConfigDisabledNotification(project);
    notification.notify(project);
  }

  /** Sticky balloon shown after EditorConfig support is disabled, with re-enable and settings actions. */
  private static class EditorConfigDisabledNotification extends Notification {
    private EditorConfigDisabledNotification(Project project) {
      super(NOTIFICATION_GROUP.getDisplayId(),
            EditorConfigBundle.message("disabled.notification"), "",
            NotificationType.INFORMATION);
      addAction(new ReEnableAction(project, this));
      addAction(new ShowEditorConfigOption(ApplicationBundle.message("code.style.indent.provider.notification.settings")));
    }
  }

  /** Opens the EditorConfig page in the code-style settings dialog. */
  private static class ShowEditorConfigOption extends DumbAwareAction {
    private ShowEditorConfigOption(@Nullable String text) {
      super(text);
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      ShowSettingsUtilImpl.showSettingsDialog(e.getProject(), "preferences.sourceCode", "EditorConfig");
    }
  }

  /** Re-enables EditorConfig support and expires the originating notification. */
  private static class ReEnableAction extends DumbAwareAction {
    private final Project myProject;
    private final Notification myNotification;

    private ReEnableAction(@NotNull Project project, Notification notification) {
      super(ApplicationBundle.message("code.style.indent.provider.notification.re.enable"));
      myProject = project;
      myNotification = notification;
    }

    @Override
    public void actionPerformed(@NotNull AnActionEvent e) {
      EditorConfigSettings settings = CodeStyle.getSettings(myProject).getCustomSettings(EditorConfigSettings.class);
      settings.ENABLED = true;
      notifyIndentOptionsChanged(myProject, null);
      myNotification.expire();
    }
  }

  @Override
  public boolean isShowFileIndentOptionsEnabled() {
    return false;
  }
}
/** * generated by Xtext 2.12.0 */ package gw4e.eclipse.dsl.tests; import com.google.inject.Inject; import gw4e.eclipse.dsl.dSLPolicies.Model; import gw4e.eclipse.dsl.tests.DSLPoliciesInjectorProvider; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.resource.Resource; import org.eclipse.xtext.testing.InjectWith; import org.eclipse.xtext.testing.XtextRunner; import org.eclipse.xtext.testing.util.ParseHelper; import org.eclipse.xtext.xbase.lib.Exceptions; import org.eclipse.xtext.xbase.lib.InputOutput; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(XtextRunner.class) @InjectWith(DSLPoliciesInjectorProvider.class) @SuppressWarnings("all") public class DSLPoliciesParsingTest { @Inject private ParseHelper<Model> parseHelper; @Test public void testMultipleGeneratorStopConditionSpaceSeparated() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(reached_vertex(v_VerifyAppRunning)) a_star(reached_edge(e_enterSearchedWord));I;a_star(reached_edge(e_enterSearchedWord));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomReachedVertex() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(reached_vertex(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomReachedEdge() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(reached_edge(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomDependencyEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + 
"Simple.json=random(dependency_edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomVertexCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(vertex_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomRequirementCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(requirement_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testRandomTimeDuration() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=random(time_duration(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomReachedVertex() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(reached_vertex(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomReachedEdge() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(reached_edge(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); 
Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomDependencyEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(dependency_edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomVertexCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(vertex_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomRequirementCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(requirement_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testWeightedRandomTimeDuration() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=weighted_random(time_duration(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomReachedVertex() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(reached_vertex(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); 
InputOutput.<EList<Resource.Diagnostic>>print(result.eResource().getErrors()); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomReachedEdge() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(reached_edge(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomDependencyEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(dependency_edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomVertexCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(vertex_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomRequirementCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(requirement_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testQuickRandomTimeDuration() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=quick_random(time_duration(100));I;")); 
Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartReachedVertex() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(reached_vertex(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartReachedEdge() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(reached_edge(v_VerifyAppRunning));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartDependencyEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(dependency_edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartEdgeCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(edge_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartVertexCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(vertex_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartRequirementCoverage() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(requirement_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch 
(Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testAStartTimeDuration() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(time_duration(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testMulti() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=a_star(time_duration(100));I;quick_random(time_duration(100));I;random(requirement_coverage(100));I;")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testNocheck() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=nocheck")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } @Test public void testSync() { try { final Model result = this.parseHelper.parse(("\n" + "Simple.json=sync")); Assert.assertNotNull(result); Assert.assertTrue(result.eResource().getErrors().isEmpty()); } catch (Throwable _e) { throw Exceptions.sneakyThrow(_e); } } }
package org.wikimedia.commons; import java.io.*; import java.util.ArrayList; import java.util.Date; import java.util.regex.Matcher; import java.util.regex.Pattern; import android.graphics.*; import android.os.Bundle; import org.mediawiki.api.*; import in.yuvi.http.fluent.ProgressListener; import android.app.*; import android.content.*; import android.support.v4.app.NotificationCompat; import android.util.*; import android.widget.*; import org.wikimedia.commons.contributions.*; import org.wikimedia.commons.modifications.ModificationsContentProvider; public class UploadService extends HandlerService<Contribution> { private static final String EXTRA_PREFIX = "org.wikimedia.commons.upload"; public static final int ACTION_UPLOAD_FILE = 1; public static final String ACTION_START_SERVICE = EXTRA_PREFIX + ".upload"; public static final String EXTRA_SOURCE = EXTRA_PREFIX + ".source"; private NotificationManager notificationManager; private ContentProviderClient contributionsProviderClient; private CommonsApplication app; private NotificationCompat.Builder curProgressNotification; private int toUpload; // DO NOT HAVE NOTIFICATION ID OF 0 FOR ANYTHING // See http://stackoverflow.com/questions/8725909/startforeground-does-not-show-my-notification // Seriously, Android? 
public static final int NOTIFICATION_UPLOAD_IN_PROGRESS = 1; public static final int NOTIFICATION_UPLOAD_COMPLETE = 2; public static final int NOTIFICATION_UPLOAD_FAILED = 3; public UploadService() { super("UploadService"); } private class NotificationUpdateProgressListener implements ProgressListener { String notificationTag; boolean notificationTitleChanged; Contribution contribution; String notificationProgressTitle; String notificationFinishingTitle; public NotificationUpdateProgressListener(String notificationTag, String notificationProgressTitle, String notificationFinishingTitle, Contribution contribution) { this.notificationTag = notificationTag; this.notificationProgressTitle = notificationProgressTitle; this.notificationFinishingTitle = notificationFinishingTitle; this.contribution = contribution; } @Override public void onProgress(long transferred, long total) { Log.d("Commons", String.format("Uploaded %d of %d", transferred, total)); if(!notificationTitleChanged) { curProgressNotification.setContentTitle(notificationProgressTitle); notificationTitleChanged = true; contribution.setState(Contribution.STATE_IN_PROGRESS); } if(transferred == total) { // Completed! 
curProgressNotification.setContentTitle(notificationFinishingTitle); curProgressNotification.setProgress(0, 100, true); } else { curProgressNotification.setProgress(100, (int) (((double) transferred / (double) total) * 100), false); } startForeground(NOTIFICATION_UPLOAD_IN_PROGRESS, curProgressNotification.build()); contribution.setTransferred(transferred); contribution.save(); } } @Override public void onDestroy() { super.onDestroy(); contributionsProviderClient.release(); Log.d("Commons", "ZOMG I AM BEING KILLED HALP!"); } @Override public void onCreate() { super.onCreate(); notificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); app = (CommonsApplication) this.getApplicationContext(); contributionsProviderClient = this.getContentResolver().acquireContentProviderClient(ContributionsContentProvider.AUTHORITY); } @Override protected void handle(int what, Contribution contribution) { switch(what) { case ACTION_UPLOAD_FILE: uploadContribution(contribution); break; default: throw new IllegalArgumentException("Unknown value for what"); } } @Override public void queue(int what, Contribution contribution) { switch (what) { case ACTION_UPLOAD_FILE: contribution.setState(Contribution.STATE_QUEUED); contribution.setTransferred(0); contribution.setContentProviderClient(contributionsProviderClient); contribution.save(); toUpload++; if (curProgressNotification != null && toUpload != 1) { curProgressNotification.setContentText(getResources().getQuantityString(R.plurals.uploads_pending_notification_indicator, toUpload, toUpload)); Log.d("Commons", String.format("%d uploads left", toUpload)); this.startForeground(NOTIFICATION_UPLOAD_IN_PROGRESS, curProgressNotification.build()); } super.queue(what, contribution); break; default: throw new IllegalArgumentException("Unknown value for what"); } } private boolean freshStart = true; @Override public int onStartCommand(Intent intent, int flags, int startId) { if(intent.getAction() == ACTION_START_SERVICE && 
freshStart) { ContentValues failedValues = new ContentValues(); failedValues.put(Contribution.Table.COLUMN_STATE, Contribution.STATE_FAILED); int updated = getContentResolver().update(ContributionsContentProvider.BASE_URI, failedValues, Contribution.Table.COLUMN_STATE + " = ? OR " + Contribution.Table.COLUMN_STATE + " = ?", new String[]{ String.valueOf(Contribution.STATE_QUEUED), String.valueOf(Contribution.STATE_IN_PROGRESS) } ); Log.d("Commons", "Set " + updated + " uploads to failed"); Log.d("Commons", "Flags is" + flags + " id is" + startId); freshStart = false; } return START_REDELIVER_INTENT; } private void uploadContribution(Contribution contribution) { MWApi api = app.getApi(); ApiResult result; InputStream file = null; String notificationTag = contribution.getLocalUri().toString(); try { file = this.getContentResolver().openInputStream(contribution.getLocalUri()); } catch(FileNotFoundException e) { throw new RuntimeException(e); } Log.d("Commons", "Before execution!"); curProgressNotification = new NotificationCompat.Builder(this).setAutoCancel(true) .setSmallIcon(R.drawable.ic_launcher) .setLargeIcon(BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher)) .setAutoCancel(true) .setContentTitle(String.format(getString(R.string.upload_progress_notification_title_start), contribution.getDisplayTitle())) .setContentText(getResources().getQuantityString(R.plurals.uploads_pending_notification_indicator, toUpload, toUpload)) .setOngoing(true) .setProgress(100, 0, true) .setContentIntent(PendingIntent.getActivity(getApplicationContext(), 0, new Intent(this, ContributionsActivity.class), 0)) .setTicker(String.format(getString(R.string.upload_progress_notification_title_in_progress), contribution.getDisplayTitle())); this.startForeground(NOTIFICATION_UPLOAD_IN_PROGRESS, curProgressNotification.build()); try { String filename = findUniqueFilename(contribution.getFilename()); if(!api.validateLogin()) { // Need to revalidate! 
if(app.revalidateAuthToken()) { Log.d("Commons", "Successfully revalidated token!"); } else { Log.d("Commons", "Unable to revalidate :("); // TODO: Put up a new notification, ask them to re-login stopForeground(true); Toast failureToast = Toast.makeText(this, R.string.authentication_failed, Toast.LENGTH_LONG); failureToast.show(); return; } } NotificationUpdateProgressListener notificationUpdater = new NotificationUpdateProgressListener(notificationTag, String.format(getString(R.string.upload_progress_notification_title_in_progress), contribution.getDisplayTitle()), String.format(getString(R.string.upload_progress_notification_title_finishing), contribution.getDisplayTitle()), contribution ); result = api.upload(filename, file, contribution.getDataLength(), contribution.getPageContents(), contribution.getEditSummary(), notificationUpdater); Log.d("Commons", "Response is" + Utils.getStringFromDOM(result.getDocument())); curProgressNotification = null; String resultStatus = result.getString("/api/upload/@result"); if(!resultStatus.equals("Success")) { String errorCode = result.getString("/api/error/@code"); showFailedNotification(contribution); EventLog.schema(CommonsApplication.EVENT_UPLOAD_ATTEMPT) .param("username", app.getCurrentAccount().name) .param("source", contribution.getSource()) .param("multiple", contribution.getMultiple()) .param("result", errorCode) .param("filename", contribution.getFilename()) .log(); } else { Date dateUploaded = null; dateUploaded = Utils.parseMWDate(result.getString("/api/upload/imageinfo/@timestamp")); String canonicalFilename = "File:" + result.getString("/api/upload/@filename").replace("_", " "); // Title vs Filename String imageUrl = result.getString("/api/upload/imageinfo/@url"); contribution.setFilename(canonicalFilename); contribution.setImageUrl(imageUrl); contribution.setState(Contribution.STATE_COMPLETED); contribution.setDateUploaded(dateUploaded); contribution.save(); 
EventLog.schema(CommonsApplication.EVENT_UPLOAD_ATTEMPT) .param("username", app.getCurrentAccount().name) .param("source", contribution.getSource()) //FIXME .param("filename", contribution.getFilename()) .param("multiple", contribution.getMultiple()) .param("result", "success") .log(); } } catch(IOException e) { Log.d("Commons", "I have a network fuckup"); showFailedNotification(contribution); return; } finally { toUpload--; if(toUpload == 0) { // Sync modifications right after all uplaods are processed ContentResolver.requestSync(((CommonsApplication) getApplicationContext()).getCurrentAccount(), ModificationsContentProvider.AUTHORITY, new Bundle()); stopForeground(true); } } } private void showFailedNotification(Contribution contribution) { Notification failureNotification = new NotificationCompat.Builder(this).setAutoCancel(true) .setSmallIcon(R.drawable.ic_launcher) .setAutoCancel(true) .setContentIntent(PendingIntent.getActivity(this, 0, new Intent(this, ContributionsActivity.class), 0)) .setTicker(String.format(getString(R.string.upload_failed_notification_title), contribution.getDisplayTitle())) .setContentTitle(String.format(getString(R.string.upload_failed_notification_title), contribution.getDisplayTitle())) .setContentText(getString(R.string.upload_failed_notification_subtitle)) .build(); notificationManager.notify(NOTIFICATION_UPLOAD_FAILED, failureNotification); contribution.setState(Contribution.STATE_FAILED); contribution.save(); } private String findUniqueFilename(String fileName) throws IOException { return findUniqueFilename(fileName, 1); } private String findUniqueFilename(String fileName, int sequenceNumber) throws IOException { String sequenceFileName; if (sequenceNumber == 1) { sequenceFileName = fileName; } else { if (fileName.indexOf('.') == -1) { // We really should have appended a file type suffix already. // But... we might not. 
sequenceFileName = fileName + " " + sequenceNumber; } else { Pattern regex = Pattern.compile("^(.*)(\\..+?)$"); Matcher regexMatcher = regex.matcher(fileName); sequenceFileName = regexMatcher.replaceAll("$1 " + sequenceNumber + "$2"); } } Log.d("Commons", "checking for uniqueness of name " + sequenceFileName); if (fileExistsWithName(sequenceFileName)) { return findUniqueFilename(fileName, sequenceNumber + 1); } else { return sequenceFileName; } } private boolean fileExistsWithName(String fileName) throws IOException { MWApi api = app.getApi(); ApiResult result; result = api.action("query") .param("prop", "imageinfo") .param("titles", "File:" + fileName) .get(); ArrayList<ApiResult> nodes = result.getNodes("/api/query/pages/page/imageinfo"); return nodes.size() > 0; } }
/**
* 
* Copyright (c) Microsoft and contributors.  All rights reserved.
* 
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*   http://www.apache.org/licenses/LICENSE-2.0
* 
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* 
* See the License for the specific language governing permissions and
* limitations under the License.
* 
*/

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

package com.microsoft.windowsazure.management.compute.models;

import com.microsoft.windowsazure.core.LazyArrayList;
import java.net.URI;
import java.util.ArrayList;

/**
* Details of a role in a deployment.
*/
public class Role {
    private String availabilitySetName;
    
    /**
    * Optional. The name of the availability set this role belongs to.
    * (NOTE(review): the generated doc said "The name of the role", which
    * describes RoleName, not this field.)
    * @return The AvailabilitySetName value.
    */
    public String getAvailabilitySetName() {
        return this.availabilitySetName;
    }
    
    /**
    * Optional. The name of the availability set this role belongs to.
    * @param availabilitySetNameValue The AvailabilitySetName value.
    */
    public void setAvailabilitySetName(final String availabilitySetNameValue) {
        this.availabilitySetName = availabilitySetNameValue;
    }
    
    private ArrayList<ConfigurationSet> configurationSets;
    
    /**
    * Optional. A collection of values that represents system or application
    * configuration settings.
    * @return The ConfigurationSets value.
    */
    public ArrayList<ConfigurationSet> getConfigurationSets() {
        return this.configurationSets;
    }
    
    /**
    * Optional. A collection of values that represents system or application
    * configuration settings.
    * @param configurationSetsValue The ConfigurationSets value.
    */
    public void setConfigurationSets(final ArrayList<ConfigurationSet> configurationSetsValue) {
        this.configurationSets = configurationSetsValue;
    }
    
    private ArrayList<DataVirtualHardDisk> dataVirtualHardDisks;
    
    /**
    * Optional. Contains the parameters Azure uses to create a data disk for a
    * virtual machine.
    * @return The DataVirtualHardDisks value.
    */
    public ArrayList<DataVirtualHardDisk> getDataVirtualHardDisks() {
        return this.dataVirtualHardDisks;
    }
    
    /**
    * Optional. Contains the parameters Azure uses to create a data disk for a
    * virtual machine.
    * @param dataVirtualHardDisksValue The DataVirtualHardDisks value.
    */
    public void setDataVirtualHardDisks(final ArrayList<DataVirtualHardDisk> dataVirtualHardDisksValue) {
        this.dataVirtualHardDisks = dataVirtualHardDisksValue;
    }
    
    private String defaultWinRmCertificateThumbprint;
    
    /**
    * Optional. The read-only thumbprint of the certificate that is used with
    * the HTTPS listener for WinRM.
    * @return The DefaultWinRmCertificateThumbprint value.
    */
    public String getDefaultWinRmCertificateThumbprint() {
        return this.defaultWinRmCertificateThumbprint;
    }
    
    /**
    * Optional. The read-only thumbprint of the certificate that is used with
    * the HTTPS listener for WinRM.
    * @param defaultWinRmCertificateThumbprintValue The
    * DefaultWinRmCertificateThumbprint value.
    */
    public void setDefaultWinRmCertificateThumbprint(final String defaultWinRmCertificateThumbprintValue) {
        this.defaultWinRmCertificateThumbprint = defaultWinRmCertificateThumbprintValue;
    }
    
    private String label;
    
    /**
    * Optional. The friendly name for the role.
    * @return The Label value.
    */
    public String getLabel() {
        return this.label;
    }
    
    /**
    * Optional. The friendly name for the role.
    * @param labelValue The Label value.
    */
    public void setLabel(final String labelValue) {
        this.label = labelValue;
    }
    
    private URI mediaLocation;
    
    /**
    * Optional. Storage location where the VM Image VHDs should be copied, for
    * published VM Images.
    * @return The MediaLocation value.
    */
    public URI getMediaLocation() {
        return this.mediaLocation;
    }
    
    /**
    * Optional. Storage location where the VM Image VHDs should be copied, for
    * published VM Images.
    * @param mediaLocationValue The MediaLocation value.
    */
    public void setMediaLocation(final URI mediaLocationValue) {
        this.mediaLocation = mediaLocationValue;
    }
    
    private String oSVersion;
    
    /**
    * Optional. The version of the operating system on which the role instances
    * are running.
    * @return The OSVersion value.
    */
    public String getOSVersion() {
        return this.oSVersion;
    }
    
    /**
    * Optional. The version of the operating system on which the role instances
    * are running.
    * @param oSVersionValue The OSVersion value.
    */
    public void setOSVersion(final String oSVersionValue) {
        this.oSVersion = oSVersionValue;
    }
    
    private OSVirtualHardDisk oSVirtualHardDisk;
    
    /**
    * Optional. Contains the parameters Azure uses to create the operating
    * system disk for the virtual machine.
    * @return The OSVirtualHardDisk value.
    */
    public OSVirtualHardDisk getOSVirtualHardDisk() {
        return this.oSVirtualHardDisk;
    }
    
    /**
    * Optional. Contains the parameters Azure uses to create the operating
    * system disk for the virtual machine.
    * @param oSVirtualHardDiskValue The OSVirtualHardDisk value.
    */
    public void setOSVirtualHardDisk(final OSVirtualHardDisk oSVirtualHardDiskValue) {
        this.oSVirtualHardDisk = oSVirtualHardDiskValue;
    }
    
    private Boolean provisionGuestAgent;
    
    /**
    * Optional. Indicates whether the WindowsAzureGuestAgent service is
    * installed on the Virtual Machine. To run a resource extension in a
    * Virtual Machine, this service must be installed.
    * @return The ProvisionGuestAgent value.
    */
    public Boolean isProvisionGuestAgent() {
        return this.provisionGuestAgent;
    }
    
    /**
    * Optional. Indicates whether the WindowsAzureGuestAgent service is
    * installed on the Virtual Machine. To run a resource extension in a
    * Virtual Machine, this service must be installed.
    * @param provisionGuestAgentValue The ProvisionGuestAgent value.
    */
    public void setProvisionGuestAgent(final Boolean provisionGuestAgentValue) {
        this.provisionGuestAgent = provisionGuestAgentValue;
    }
    
    private ArrayList<ResourceExtensionReference> resourceExtensionReferences;
    
    /**
    * Optional. Contains a collection of resource extensions that are to be
    * installed on the Virtual Machine. This element is used if
    * ProvisionGuestAgent is set to true.
    * @return The ResourceExtensionReferences value.
    */
    public ArrayList<ResourceExtensionReference> getResourceExtensionReferences() {
        return this.resourceExtensionReferences;
    }
    
    /**
    * Optional. Contains a collection of resource extensions that are to be
    * installed on the Virtual Machine. This element is used if
    * ProvisionGuestAgent is set to true.
    * @param resourceExtensionReferencesValue The ResourceExtensionReferences
    * value.
    */
    public void setResourceExtensionReferences(final ArrayList<ResourceExtensionReference> resourceExtensionReferencesValue) {
        this.resourceExtensionReferences = resourceExtensionReferencesValue;
    }
    
    private String roleName;
    
    /**
    * Optional. The name of the role.
    * @return The RoleName value.
    */
    public String getRoleName() {
        return this.roleName;
    }
    
    /**
    * Optional. The name of the role.
    * @param roleNameValue The RoleName value.
    */
    public void setRoleName(final String roleNameValue) {
        this.roleName = roleNameValue;
    }
    
    private String roleSize;
    
    /**
    * Optional. The size of the role instance.
    * @return The RoleSize value.
    */
    public String getRoleSize() {
        return this.roleSize;
    }
    
    /**
    * Optional. The size of the role instance.
    * @param roleSizeValue The RoleSize value.
    */
    public void setRoleSize(final String roleSizeValue) {
        this.roleSize = roleSizeValue;
    }
    
    private String roleType;
    
    /**
    * Optional. Specifies the type of the role. This element is only listed for
    * Virtual Machine deployments, and by default is PersistentVMRole.
    * @return The RoleType value.
    */
    public String getRoleType() {
        return this.roleType;
    }
    
    /**
    * Optional. Specifies the type of the role. This element is only listed for
    * Virtual Machine deployments, and by default is PersistentVMRole.
    * @param roleTypeValue The RoleType value.
    */
    public void setRoleType(final String roleTypeValue) {
        this.roleType = roleTypeValue;
    }
    
    private VMImageInput vMImageInput;
    
    /**
    * Optional. When a VM Image is used to create a new PersistantVMRole, the
    * DiskConfigurations in the VM Image are used to create new Disks for the
    * new VM. This parameter can be used to resize the newly created Disks to
    * a larger size than the underlying DiskConfigurations in the VM
    * Image.This property is only returned with a version header of 2014-10-01
    * or newer.
    * @return The VMImageInput value.
    */
    public VMImageInput getVMImageInput() {
        return this.vMImageInput;
    }
    
    /**
    * Optional. When a VM Image is used to create a new PersistantVMRole, the
    * DiskConfigurations in the VM Image are used to create new Disks for the
    * new VM. This parameter can be used to resize the newly created Disks to
    * a larger size than the underlying DiskConfigurations in the VM
    * Image.This property is only returned with a version header of 2014-10-01
    * or newer.
    * @param vMImageInputValue The VMImageInput value.
    */
    public void setVMImageInput(final VMImageInput vMImageInputValue) {
        this.vMImageInput = vMImageInputValue;
    }
    
    private String vMImageName;
    
    /**
    * Optional. The name of the VMImage from which this Role is to be
    * created. If the OSDisk in the VMImage was Specialized, then no
    * WindowsProvisioningConfigurationSet or LinuxProvisioningConfigurationSet
    * should be provided. No OSVirtualHardDisk or DataVirtualHardDisk should
    * be specified when using this argument.
    * @return The VMImageName value.
    */
    public String getVMImageName() {
        return this.vMImageName;
    }
    
    /**
    * Optional. The name of the VMImage from which this Role is to be
    * created. If the OSDisk in the VMImage was Specialized, then no
    * WindowsProvisioningConfigurationSet or LinuxProvisioningConfigurationSet
    * should be provided. No OSVirtualHardDisk or DataVirtualHardDisk should
    * be specified when using this argument.
    * @param vMImageNameValue The VMImageName value.
    */
    public void setVMImageName(final String vMImageNameValue) {
        this.vMImageName = vMImageNameValue;
    }
    
    /**
    * Initializes a new instance of the Role class.
    * Collection-valued members are initialized to empty LazyArrayLists so
    * callers can append without a null check.
    */
    public Role() {
        this.setConfigurationSets(new LazyArrayList<ConfigurationSet>());
        this.setDataVirtualHardDisks(new LazyArrayList<DataVirtualHardDisk>());
        this.setResourceExtensionReferences(new LazyArrayList<ResourceExtensionReference>());
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.sql.calcite.expression;

import com.google.common.collect.ImmutableMap;
import org.apache.calcite.avatica.util.TimeUnit;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlIntervalQualifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.sql.calcite.expression.builtin.LeastOperatorConversion;
import org.junit.Before;
import org.junit.Test;

import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Tests that the SQL LEAST function converts to the expected Druid expression
 * and evaluates to the minimum of its (coerced) arguments.
 */
public class LeastExpressionTest extends ExpressionTestBase
{
  private static final String DOUBLE_KEY = "d";
  private static final double DOUBLE_VALUE = 3.1;
  private static final String LONG_KEY = "l";
  private static final long LONG_VALUE = 2L;
  private static final String STRING_KEY = "s";
  private static final String STRING_VALUE = "foo";

  // One column of each value type so coercion across types can be exercised.
  private static final RowSignature ROW_SIGNATURE =
      RowSignature.builder()
                  .add(DOUBLE_KEY, ValueType.DOUBLE)
                  .add(LONG_KEY, ValueType.LONG)
                  .add(STRING_KEY, ValueType.STRING)
                  .build();

  private static final Map<String, Object> BINDINGS = ImmutableMap.of(
      DOUBLE_KEY, DOUBLE_VALUE,
      LONG_KEY, LONG_VALUE,
      STRING_KEY, STRING_VALUE
  );

  private LeastOperatorConversion conversion;
  private ExpressionTestHelper helper;

  @Before
  public void setUp()
  {
    conversion = new LeastOperatorConversion();
    helper = new ExpressionTestHelper(ROW_SIGNATURE, BINDINGS);
  }

  @Test
  public void testNoArgs()
  {
    // LEAST() with no arguments evaluates to null.
    testExpression(Collections.emptyList(), buildExpectedExpression(), null);
  }

  @Test
  public void testAllNull()
  {
    List<RexNode> args = Arrays.asList(helper.getConstantNull(), helper.getConstantNull());
    testExpression(args, buildExpectedExpression(null, null), null);
  }

  @Test
  public void testSomeNull()
  {
    // Nulls are skipped; the remaining minimum is the double column's value.
    List<RexNode> args = Arrays.asList(
        helper.makeInputRef(DOUBLE_KEY),
        helper.getConstantNull(),
        helper.makeInputRef(STRING_KEY)
    );
    DruidExpression expected = buildExpectedExpression(
        helper.makeVariable(DOUBLE_KEY),
        null,
        helper.makeVariable(STRING_KEY)
    );
    testExpression(args, expected, String.valueOf(DOUBLE_VALUE));
  }

  @Test
  public void testAllDouble()
  {
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral(34.1),
        helper.makeInputRef(DOUBLE_KEY),
        helper.makeLiteral(5.2),
        helper.makeLiteral(767.3)
    );
    DruidExpression expected = buildExpectedExpression(
        34.1,
        helper.makeVariable(DOUBLE_KEY),
        5.2,
        767.3
    );
    testExpression(args, expected, 3.1);
  }

  @Test
  public void testAllLong()
  {
    List<RexNode> args = Arrays.asList(helper.makeInputRef(LONG_KEY), helper.makeLiteral(0));
    DruidExpression expected = buildExpectedExpression(helper.makeVariable(LONG_KEY), 0);
    testExpression(args, expected, 0L);
  }

  @Test
  public void testAllString()
  {
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral("B"),
        helper.makeInputRef(STRING_KEY),
        helper.makeLiteral("A")
    );
    DruidExpression expected = buildExpectedExpression(
        "B",
        helper.makeVariable(STRING_KEY),
        "A"
    );
    testExpression(args, expected, "A");
  }

  @Test
  public void testCoerceString()
  {
    // Mixed numeric/string arguments coerce the result to a string.
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral(-1),
        helper.makeInputRef(DOUBLE_KEY),
        helper.makeLiteral("A")
    );
    DruidExpression expected = buildExpectedExpression(
        -1,
        helper.makeVariable(DOUBLE_KEY),
        "A"
    );
    testExpression(args, expected, "-1");
  }

  @Test
  public void testCoerceDouble()
  {
    // Mixed long/double arguments coerce the result to a double.
    List<RexNode> args = Arrays.asList(helper.makeLiteral(-1), helper.makeInputRef(DOUBLE_KEY));
    DruidExpression expected = buildExpectedExpression(-1, helper.makeVariable(DOUBLE_KEY));
    testExpression(args, expected, -1.0);
  }

  @Test
  public void testDecimal()
  {
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral(BigDecimal.valueOf(1.2)),
        helper.makeLiteral(BigDecimal.valueOf(3.4))
    );
    testExpression(args, buildExpectedExpression(1.2, 3.4), 1.2);
  }

  @Test
  public void testDecimalWithNullShouldReturnString()
  {
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral(BigDecimal.valueOf(1.2)),
        helper.makeLiteral(BigDecimal.valueOf(3.4)),
        helper.getConstantNull()
    );
    testExpression(args, buildExpectedExpression(1.2, 3.4, null), "1.2");
  }

  @Test
  public void testTimestamp()
  {
    List<RexNode> args = Arrays.asList(
        helper.makeLiteral(DateTimes.utc(1000)),
        helper.makeLiteral(DateTimes.utc(2000))
    );
    testExpression(args, buildExpectedExpression(1000, 2000), 1000L);
  }

  @Test
  public void testInvalidType()
  {
    // Interval literals are not a supported argument type.
    expectException(IllegalArgumentException.class, "Argument 0 has invalid type: INTERVAL_YEAR_MONTH");
    RexNode interval = helper.makeLiteral(
        new BigDecimal(13), // YEAR-MONTH literals value is months
        new SqlIntervalQualifier(TimeUnit.YEAR, TimeUnit.MONTH, SqlParserPos.ZERO)
    );
    testExpression(Collections.singletonList(interval), null, null);
  }

  /** Delegates to the shared helper with this test's operator. */
  private void testExpression(
      List<? extends RexNode> exprs,
      final DruidExpression expectedExpression,
      final Object expectedResult
  )
  {
    helper.testExpression(conversion.calciteOperator(), exprs, expectedExpression, expectedResult);
  }

  /** Builds the expected Druid "least(...)" expression over the given arguments. */
  private DruidExpression buildExpectedExpression(Object... args)
  {
    return helper.buildExpectedExpression(conversion.getDruidFunctionName(), args);
  }
}
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.lang.ant.config.impl; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer; import com.intellij.ide.macro.Macro; import com.intellij.ide.macro.MacroManager; import com.intellij.lang.ant.config.AntBuildFileBase; import com.intellij.lang.ant.config.AntBuildModel; import com.intellij.lang.ant.config.AntBuildModelBase; import com.intellij.lang.ant.config.AntBuildTarget; import com.intellij.lang.ant.config.AntConfigurationBase; import com.intellij.lang.ant.dom.AntDomFileDescription; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.actionSystem.impl.SimpleDataContext; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.WriteExternalException; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import com.intellij.psi.xml.XmlFile; import 
com.intellij.util.NewInstanceFactory; import com.intellij.util.SystemProperties; import com.intellij.util.config.AbstractProperty; import com.intellij.util.config.BooleanProperty; import com.intellij.util.config.ExternalizablePropertyContainer; import com.intellij.util.config.IntProperty; import com.intellij.util.config.ListProperty; import com.intellij.util.config.StringProperty; import com.intellij.util.config.ValueProperty; import java.util.HashMap; import consulo.roots.types.BinariesOrderRootType; import consulo.vfs.util.ArchiveVfsUtil; public class AntBuildFileImpl implements AntBuildFileBase { private static final Logger LOG = Logger.getInstance("#com.intellij.lang.ant.config.impl.AntBuildFileImpl"); @NonNls private static final String ANT_LIB = "/.ant/lib"; private volatile Map<String, String> myCachedExternalProperties; private final Object myOptionsLock = new Object(); public static final AbstractProperty<Sdk> ANT_INSTALLATION = new AbstractProperty<Sdk>() { @Override public String getName() { return "$antInstallation"; } @Override public Sdk getDefault(final AbstractPropertyContainer container) { return GlobalAntConfiguration.INSTANCE.get(container).findBundleAntBundle(); } @Override public Sdk copy(final Sdk value) { return value; } @Override public Sdk get(final AbstractPropertyContainer container) { if(container.hasProperty(ANT_REFERENCE)) { return RUN_WITH_ANT.get(container); } return GlobalAntConfiguration.INSTANCE.get(container).findBundleAntBundle(); } }; public static final AbstractProperty<List<File>> ALL_CLASS_PATH = new AbstractProperty<List<File>>() { @Override public String getName() { return "$allClasspath"; } @Override public List<File> getDefault(AbstractProperty.AbstractPropertyContainer container) { return get(container); } @Override public List<File> get(AbstractProperty.AbstractPropertyContainer container) { List<File> classpath = new ArrayList<File>(); collectClasspath(classpath, ADDITIONAL_CLASSPATH, container); Sdk antInstallation = 
ANT_INSTALLATION.get(container); if(antInstallation != null) { VirtualFile[] virtualFiles = antInstallation.getRootProvider().getFiles(BinariesOrderRootType.getInstance()); for(VirtualFile virtualFile : virtualFiles) { VirtualFile localFile = ArchiveVfsUtil.getVirtualFileForArchive(virtualFile); if(localFile != null) { classpath.add(VfsUtil.virtualToIoFile(localFile)); } } } return classpath; } private void collectClasspath(List<File> files, ListProperty<AntClasspathEntry> property, AbstractProperty.AbstractPropertyContainer container) { if(!container.hasProperty(property)) { return; } Iterator<AntClasspathEntry> entries = property.getIterator(container); while(entries.hasNext()) { AntClasspathEntry entry = entries.next(); entry.addFilesTo(files); } } @Override public void set(AbstractProperty.AbstractPropertyContainer container, List<File> files) { throw new UnsupportedOperationException(getName()); } @Override public List<File> copy(List<File> files) { return files; } }; public static final BooleanProperty RUN_IN_BACKGROUND = new BooleanProperty("runInBackground", true); public static final IntProperty MAX_HEAP_SIZE = new IntProperty("maximumHeapSize", 128); public static final IntProperty MAX_STACK_SIZE = new IntProperty("maximumStackSize", 2); public static final BooleanProperty VERBOSE = new BooleanProperty("verbose", true); public static final BooleanProperty TREE_VIEW = new BooleanProperty("treeView", true); public static final BooleanProperty CLOSE_ON_NO_ERRORS = new BooleanProperty("viewClosedWhenNoErrors", false); public static final StringProperty CUSTOM_JDK_NAME = new StringProperty("customJdkName", ""); public static final ListProperty<TargetFilter> TARGET_FILTERS = ListProperty.create("targetFilters"); public static final ListProperty<BuildFileProperty> ANT_PROPERTIES = ListProperty.create("properties"); public static final StringProperty ANT_COMMAND_LINE_PARAMETERS = new StringProperty("antCommandLine", ""); public static final 
AbstractProperty<AntReference> ANT_REFERENCE = new ValueProperty<AntReference>("antReference", AntReference.PROJECT_DEFAULT); public static final ListProperty<AntClasspathEntry> ADDITIONAL_CLASSPATH = ListProperty.create("additionalClassPath"); public static final AbstractProperty<Sdk> RUN_WITH_ANT = new AbstractProperty<Sdk>() { @Override public String getName() { return "$runWithAnt"; } @Override @Nullable public Sdk getDefault(AbstractProperty.AbstractPropertyContainer container) { return get(container); } @Override @Nullable public Sdk get(AbstractProperty.AbstractPropertyContainer container) { return AntReference.findAnt(ANT_REFERENCE, container); } @Override public Sdk copy(Sdk antInstallation) { return antInstallation; } }; private final VirtualFile myVFile; private final Project myProject; private final AntConfigurationBase myAntConfiguration; private final ExternalizablePropertyContainer myWorkspaceOptions; private final ExternalizablePropertyContainer myProjectOptions; private final AbstractProperty.AbstractPropertyContainer myAllOptions; private final ClassLoaderHolder myClassloaderHolder; private boolean myShouldExpand = true; public AntBuildFileImpl(final XmlFile antFile, final AntConfigurationBase configuration) { myVFile = antFile.getOriginalFile().getVirtualFile(); myProject = antFile.getProject(); myAntConfiguration = configuration; myWorkspaceOptions = new ExternalizablePropertyContainer(); myWorkspaceOptions.registerProperty(RUN_IN_BACKGROUND); myWorkspaceOptions.registerProperty(CLOSE_ON_NO_ERRORS); myWorkspaceOptions.registerProperty(TREE_VIEW); myWorkspaceOptions.registerProperty(VERBOSE); myWorkspaceOptions.registerProperty(TARGET_FILTERS, "filter", NewInstanceFactory.fromClass(TargetFilter.class)); myWorkspaceOptions.rememberKey(RUN_WITH_ANT); myProjectOptions = new ExternalizablePropertyContainer(); myProjectOptions.registerProperty(MAX_HEAP_SIZE); myProjectOptions.registerProperty(MAX_STACK_SIZE); 
myProjectOptions.registerProperty(CUSTOM_JDK_NAME); myProjectOptions.registerProperty(ANT_COMMAND_LINE_PARAMETERS); myProjectOptions.registerProperty(ANT_PROPERTIES, "property", NewInstanceFactory.fromClass(BuildFileProperty.class)); myProjectOptions.registerProperty(ADDITIONAL_CLASSPATH, "entry", SinglePathEntry.EXTERNALIZER); myProjectOptions.registerProperty(ANT_REFERENCE, AntReference.EXTERNALIZER); myAllOptions = new CompositePropertyContainer(new AbstractProperty.AbstractPropertyContainer[]{ myWorkspaceOptions, myProjectOptions, GlobalAntConfiguration.getInstance().getProperties(getProject()) }); myClassloaderHolder = new AntBuildFileClassLoaderHolder(myAllOptions); } public static List<File> getUserHomeLibraries() { ArrayList<File> classpath = new ArrayList<File>(); final String homeDir = SystemProperties.getUserHome(); new AllJarsUnderDirEntry(new File(homeDir, ANT_LIB)).addFilesTo(classpath); return classpath; } @Override @Nullable public String getPresentableName() { AntBuildModel model = myAntConfiguration.getModelIfRegistered(this); String name = model != null ? model.getName() : null; if(name == null || name.trim().length() == 0) { name = myVFile.getName(); } return name; } @Override @Nullable public String getName() { final VirtualFile vFile = getVirtualFile(); return vFile != null ? vFile.getName() : null; } @Override public AntBuildModelBase getModel() { return (AntBuildModelBase) myAntConfiguration.getModel(this); } @Override @Nullable public AntBuildModelBase getModelIfRegistered() { return (AntBuildModelBase) myAntConfiguration.getModelIfRegistered(this); } @Override public boolean isRunInBackground() { return RUN_IN_BACKGROUND.value(myAllOptions); } @Override @Nullable public XmlFile getAntFile() { final PsiFile psiFile = myVFile.isValid() ? PsiManager.getInstance(getProject()).findFile(myVFile) : null; if(!(psiFile instanceof XmlFile)) { return null; } final XmlFile xmlFile = (XmlFile) psiFile; return AntDomFileDescription.isAntFile(xmlFile) ? 
xmlFile : null; } @Override public Project getProject() { return myProject; } @Override @Nullable public VirtualFile getVirtualFile() { return myVFile; } @Override public AbstractProperty.AbstractPropertyContainer getAllOptions() { return myAllOptions; } @Override @Nullable public String getPresentableUrl() { final VirtualFile file = getVirtualFile(); return (file == null) ? null : file.getPresentableUrl(); } @Override public boolean shouldExpand() { return myShouldExpand; } @Override public void setShouldExpand(boolean expand) { myShouldExpand = expand; } @Override public boolean isTargetVisible(final AntBuildTarget target) { final TargetFilter filter = findFilter(target.getName()); if(filter == null) { return target.isDefault() || target.getNotEmptyDescription() != null; } return filter.isVisible(); } @Override public boolean exists() { final VirtualFile file = getVirtualFile(); if(file == null || !(new File(file.getPath()).exists())) { return false; } return true; } @Override public void updateProperties() { // do not change position final AntBuildTarget[] targets = getModel().getTargets(); final Map<String, AntBuildTarget> targetByName = new LinkedHashMap<String, AntBuildTarget>(targets.length); for(AntBuildTarget target : targets) { String targetName = target.getName(); if(targetName != null) { targetByName.put(targetName, target); } } synchronized(myOptionsLock) { myCachedExternalProperties = null; final ArrayList<TargetFilter> filters = TARGET_FILTERS.getModifiableList(myAllOptions); for(Iterator<TargetFilter> iterator = filters.iterator(); iterator.hasNext(); ) { final TargetFilter filter = iterator.next(); final String name = filter.getTargetName(); if(name == null) { iterator.remove(); } else { AntBuildTarget target = targetByName.get(name); if(target != null) { filter.updateDescription(target); targetByName.remove(name); } else { iterator.remove(); } } } // handle the rest of targets with non-null names for(AntBuildTarget target : targetByName.values()) 
{ filters.add(TargetFilter.fromTarget(target)); } } } @Override public void updateConfig() { basicUpdateConfig(); DaemonCodeAnalyzer.getInstance(getProject()).restart(); } @Override public void setTreeView(final boolean value) { TREE_VIEW.primSet(myAllOptions, value); } @Override public void setVerboseMode(final boolean value) { VERBOSE.primSet(myAllOptions, value); } @Override public boolean isViewClosedWhenNoErrors() { return CLOSE_ON_NO_ERRORS.value(myAllOptions); } @Override public void readWorkspaceProperties(final Element parentNode) throws InvalidDataException { synchronized(myOptionsLock) { myWorkspaceOptions.readExternal(parentNode); final Element expanded = parentNode.getChild("expanded"); if(expanded != null) { myShouldExpand = Boolean.valueOf(expanded.getAttributeValue("value")); } // don't lose old command line parameters final Element antCommandLine = parentNode.getChild("antCommandLine"); if(antCommandLine != null) { ANT_COMMAND_LINE_PARAMETERS.set(myProjectOptions, antCommandLine.getAttributeValue("value")); } } } @Override public void writeWorkspaceProperties(final Element parentNode) throws WriteExternalException { synchronized(myOptionsLock) { myWorkspaceOptions.writeExternal(parentNode); final Element expandedElem = new Element("expanded"); expandedElem.setAttribute("value", Boolean.toString(myShouldExpand)); parentNode.addContent(expandedElem); } } @Override public void readProperties(final Element parentNode) throws InvalidDataException { synchronized(myOptionsLock) { myProjectOptions.readExternal(parentNode); basicUpdateConfig(); readWorkspaceProperties(parentNode); // Compatibility with old Idea } } @Override public void writeProperties(final Element parentNode) throws WriteExternalException { synchronized(myOptionsLock) { myProjectOptions.writeExternal(parentNode); } } private void basicUpdateConfig() { final XmlFile antFile = getAntFile(); if(antFile != null) { bindAnt(); myClassloaderHolder.updateClasspath(); } } @Override @Nonnull public 
Map<String, String> getExternalProperties() { Map<String, String> result = myCachedExternalProperties; if(result == null) { synchronized(myOptionsLock) { result = myCachedExternalProperties; if(result == null) { result = new HashMap<String, String>(); final DataContext context = SimpleDataContext.getProjectContext(myProject); final MacroManager macroManager = MacroManager.getInstance(); Iterator<BuildFileProperty> properties = ANT_PROPERTIES.getIterator(myAllOptions); while(properties.hasNext()) { BuildFileProperty property = properties.next(); try { String value = property.getPropertyValue(); value = macroManager.expandSilentMarcos(value, true, context); value = macroManager.expandSilentMarcos(value, false, context); result.put(property.getPropertyName(), value); } catch(Macro.ExecutionCancelledException e) { LOG.debug(e); } } myCachedExternalProperties = result; } } } return result; } private void bindAnt() { ANT_REFERENCE.set(getAllOptions(), ANT_REFERENCE.get(getAllOptions()).bind(GlobalAntConfiguration.getInstance())); } @Nullable private TargetFilter findFilter(final String targetName) { final List<TargetFilter> filters; synchronized(myOptionsLock) { filters = TARGET_FILTERS.get(myAllOptions); } for(TargetFilter targetFilter : filters) { if(Comparing.equal(targetName, targetFilter.getTargetName())) { return targetFilter; } } return null; } @Nonnull public ClassLoader getClassLoader() { return myClassloaderHolder.getClassloader(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.metrics.groups;

import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MetricOptions;
import org.apache.flink.core.testutils.BlockerSync;
import org.apache.flink.metrics.CharacterFilter;
import org.apache.flink.metrics.Metric;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.reporter.MetricReporter;
import org.apache.flink.runtime.metrics.MetricRegistry;
import org.apache.flink.runtime.metrics.MetricRegistryConfiguration;
import org.apache.flink.runtime.metrics.MetricRegistryImpl;
import org.apache.flink.runtime.metrics.dump.QueryScopeInfo;
import org.apache.flink.runtime.metrics.scope.ScopeFormats;
import org.apache.flink.runtime.metrics.util.TestReporter;

import org.junit.Test;

import javax.annotation.Nullable;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Tests for the {@link AbstractMetricGroup}.
 */
public class AbstractMetricGroupTest {
    /**
     * Verifies that no {@link NullPointerException} is thrown when {@link AbstractMetricGroup#getAllVariables()} is
     * called and the parent is null.
     */
    @Test
    public void testGetAllVariables() throws Exception {
        MetricRegistryImpl registry = new MetricRegistryImpl(MetricRegistryConfiguration.defaultMetricRegistryConfiguration());

        // Anonymous subclass with a null parent group; the query-service info and group name
        // are irrelevant for this test, so they return null / "".
        AbstractMetricGroup group = new AbstractMetricGroup<AbstractMetricGroup<?>>(registry, new String[0], null) {
            @Override
            protected QueryScopeInfo createQueryServiceMetricInfo(CharacterFilter filter) {
                return null;
            }

            @Override
            protected String getGroupName(CharacterFilter filter) {
                return "";
            }
        };
        // With no parent and no scope components there must be no variables — and no NPE.
        assertTrue(group.getAllVariables().isEmpty());

        registry.shutdown().get();
    }

    // ========================================================================
    // Scope Caching
    // ========================================================================

    /** Filter replacing every 'C' with 'X'. */
    private static final CharacterFilter FILTER_C = new CharacterFilter() {
        @Override
        public String filterCharacters(String input) {
            return input.replace("C", "X");
        }
    };

    /** Filter replacing every 'B' with 'X'. */
    private static final CharacterFilter FILTER_B = new CharacterFilter() {
        @Override
        public String filterCharacters(String input) {
            return input.replace("B", "X");
        }
    };

    /**
     * Verifies that the fully-qualified scope string is cached independently per reporter
     * (each reporter has its own delimiter), and that the cached scope is not re-filtered
     * on subsequent {@code getMetricIdentifier} calls — only the metric name part is.
     */
    @Test
    public void testScopeCachingForMultipleReporters() throws Exception {
        Configuration config = new Configuration();
        config.setString(MetricOptions.SCOPE_NAMING_TM, "A.B.C.D");
        // Two reporters with different scope delimiters ('-' and '!').
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter1.class.getName());
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, "-");
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, TestReporter2.class.getName());
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_SCOPE_DELIMITER, "!");

        MetricRegistryImpl testRegistry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));
        try {
            MetricGroup tmGroup = new TaskManagerMetricGroup(testRegistry, "host", "id");
            // Registering the counter triggers notifyOfAddedMetric on both reporters,
            // which run the actual scope assertions (see checkScopes implementations).
            tmGroup.counter("1");
            assertEquals("Reporters were not properly instantiated", 2, testRegistry.getReporters().size());
            for (MetricReporter reporter : testRegistry.getReporters()) {
                ScopeCheckingTestReporter typedReporter = (ScopeCheckingTestReporter) reporter;
                // Assertions thrown inside the reporter callback are captured; rethrow here
                // so the test actually fails on them.
                if (typedReporter.failureCause != null) {
                    throw typedReporter.failureCause;
                }
            }
        } finally {
            testRegistry.shutdown().get();
        }
    }

    /**
     * Verifies that the logical scope is cached per reporter as well, using the reporter's
     * own character filter and delimiter (see LogicalScopeReporter1/2).
     */
    @Test
    public void testLogicalScopeCachingForMultipleReporters() throws Exception {
        Configuration config = new Configuration();
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test1." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, LogicalScopeReporter1.class.getName());
        config.setString(ConfigConstants.METRICS_REPORTER_PREFIX + "test2." + ConfigConstants.METRICS_REPORTER_CLASS_SUFFIX, LogicalScopeReporter2.class.getName());

        MetricRegistryImpl testRegistry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));
        try {
            MetricGroup tmGroup = new TaskManagerMetricGroup(testRegistry, "host", "id")
                .addGroup("B")
                .addGroup("C");
            // Registration triggers the reporters' checkScopes assertions.
            tmGroup.counter("1");
            assertEquals("Reporters were not properly instantiated", 2, testRegistry.getReporters().size());
            for (MetricReporter reporter : testRegistry.getReporters()) {
                ScopeCheckingTestReporter typedReporter = (ScopeCheckingTestReporter) reporter;
                if (typedReporter.failureCause != null) {
                    throw typedReporter.failureCause;
                }
            }
        } finally {
            testRegistry.shutdown().get();
        }
    }

    /**
     * Base reporter that runs {@link #checkScopes} on every added metric and stores the first
     * failure instead of throwing, so it can be rethrown on the test thread.
     */
    private abstract static class ScopeCheckingTestReporter extends TestReporter {
        protected Exception failureCause;

        @Override
        public void notifyOfAddedMetric(Metric metric, String metricName, MetricGroup group) {
            try {
                checkScopes(metric, metricName, group);
            } catch (Exception e) {
                // keep only the first failure
                if (failureCause == null) {
                    failureCause = e;
                }
            }
        }

        public abstract void checkScopes(Metric metric, String metricName, MetricGroup group);
    }

    /**
     * Reporter that verifies the scope caching behavior.
     */
    public static class TestReporter1 extends ScopeCheckingTestReporter {
        @Override
        public String filterCharacters(String input) {
            return FILTER_B.filterCharacters(input);
        }

        @Override
        public void checkScopes(Metric metric, String metricName, MetricGroup group) {
            // the first call determines which filter is applied to all future calls; in this case no filter is used at all
            assertEquals("A-B-C-D-1", group.getMetricIdentifier(metricName));
            // from now on the scope string is cached and should not be reliant on the given filter
            assertEquals("A-B-C-D-1", group.getMetricIdentifier(metricName, FILTER_C));
            assertEquals("A-B-C-D-1", group.getMetricIdentifier(metricName, this));
            // the metric name however is still affected by the filter as it is not cached
            assertEquals("A-B-C-D-4", group.getMetricIdentifier(metricName, new CharacterFilter() {
                @Override
                public String filterCharacters(String input) {
                    return input.replace("B", "X").replace("1", "4");
                }
            }));
        }
    }

    /**
     * Reporter that verifies the scope caching behavior.
     */
    public static class TestReporter2 extends ScopeCheckingTestReporter {
        @Override
        public String filterCharacters(String input) {
            return FILTER_C.filterCharacters(input);
        }

        @Override
        public void checkScopes(Metric metric, String metricName, MetricGroup group) {
            // the first call determines which filter is applied to all future calls
            assertEquals("A!B!X!D!1", group.getMetricIdentifier(metricName, this));
            // from now on the scope string is cached and should not be reliant on the given filter
            assertEquals("A!B!X!D!1", group.getMetricIdentifier(metricName));
            assertEquals("A!B!X!D!1", group.getMetricIdentifier(metricName, FILTER_C));
            // the metric name however is still affected by the filter as it is not cached
            assertEquals("A!B!X!D!3", group.getMetricIdentifier(metricName, new CharacterFilter() {
                @Override
                public String filterCharacters(String input) {
                    return input.replace("A", "X").replace("1", "3");
                }
            }));
        }
    }

    /**
     * Reporter that verifies the logical-scope caching behavior.
     */
    public static final class LogicalScopeReporter1 extends ScopeCheckingTestReporter {
        @Override
        public String filterCharacters(String input) {
            return FILTER_B.filterCharacters(input);
        }

        @Override
        public void checkScopes(Metric metric, String metricName, MetricGroup group) {
            // 'B' is filtered to 'X' by this reporter's filter; delimiter is '-'
            final String logicalScope = ((FrontMetricGroup<AbstractMetricGroup<?>>) group).getLogicalScope(this, '-');
            assertEquals("taskmanager-X-C", logicalScope);
        }
    }

    /**
     * Reporter that verifies the logical-scope caching behavior.
     */
    public static final class LogicalScopeReporter2 extends ScopeCheckingTestReporter {
        @Override
        public String filterCharacters(String input) {
            return FILTER_C.filterCharacters(input);
        }

        @Override
        public void checkScopes(Metric metric, String metricName, MetricGroup group) {
            // 'C' is filtered to 'X' by this reporter's filter; delimiter is ','
            final String logicalScope = ((FrontMetricGroup<AbstractMetricGroup<?>>) group).getLogicalScope(this, ',');
            assertEquals("taskmanager,B,X", logicalScope);
        }
    }

    /**
     * Verifies that without any configured reporter the default delimiter is used and
     * no scope-string caching takes place, so each call re-applies the given filter.
     */
    @Test
    public void testScopeGenerationWithoutReporters() throws Exception {
        Configuration config = new Configuration();
        config.setString(MetricOptions.SCOPE_NAMING_TM, "A.B.C.D");
        MetricRegistryImpl testRegistry = new MetricRegistryImpl(MetricRegistryConfiguration.fromConfiguration(config));

        try {
            TaskManagerMetricGroup group = new TaskManagerMetricGroup(testRegistry, "host", "id");
            assertEquals("MetricReporters list should be empty", 0, testRegistry.getReporters().size());

            // default delimiter should be used
            assertEquals("A.B.X.D.1", group.getMetricIdentifier("1", FILTER_C));
            // no caching should occur
            assertEquals("A.X.C.D.1", group.getMetricIdentifier("1", FILTER_B));
            // invalid reporter indices do not throw errors
            assertEquals("A.X.C.D.1", group.getMetricIdentifier("1", FILTER_B, -1));
            assertEquals("A.X.C.D.1", group.getMetricIdentifier("1", FILTER_B, 2));
        } finally {
            testRegistry.shutdown().get();
        }
    }

    /**
     * Regression test: getAllVariables() on a child group must not deadlock with a concurrent
     * registration holding the parent group's lock (historically it acquired all parent locks).
     */
    @Test
    public void testGetAllVariablesDoesNotDeadlock() throws InterruptedException {
        final TestMetricRegistry registry = new TestMetricRegistry();

        final MetricGroup parent = new GenericMetricGroup(registry, UnregisteredMetricGroups.createUnregisteredTaskManagerMetricGroup(), "parent");
        final MetricGroup child = parent.addGroup("child");

        final Thread parentRegisteringThread = new Thread(() -> parent.counter("parent_counter"));
        final Thread childRegisteringThread = new Thread(() -> child.counter("child_counter"));

        final BlockerSync parentSync = new BlockerSync();
        final BlockerSync childSync = new BlockerSync();
        try {
            // start both threads and have them block in the registry, so they acquire the lock of their respective group
            registry.setOnRegistrationAction(childSync::blockNonInterruptible);
            childRegisteringThread.start();
            childSync.awaitBlocker();

            registry.setOnRegistrationAction(parentSync::blockNonInterruptible);
            parentRegisteringThread.start();
            parentSync.awaitBlocker();

            // the parent thread remains blocked to simulate the child thread holding some lock in the registry/reporter
            // the child thread continues execution and calls getAllVariables()
            // in the past this would block indefinitely since the method acquires the locks of all parent groups
            childSync.releaseBlocker();
            // wait with a timeout to ensure the finally block is executed _at some point_, un-blocking the parent
            childRegisteringThread.join(1000 * 10);
            parentSync.releaseBlocker();
            parentRegisteringThread.join();
        } finally {
            // unconditional cleanup so a failed assertion cannot leave threads blocked
            parentSync.releaseBlocker();
            childSync.releaseBlocker();
            parentRegisteringThread.join();
            childRegisteringThread.join();
        }
    }

    /**
     * Minimal registry stub whose register() runs a configurable action (used to park the
     * registering thread) and then calls getAllVariables() on the group being registered.
     */
    private static final class TestMetricRegistry implements MetricRegistry {

        private Runnable onRegistrationAction;

        void setOnRegistrationAction(Runnable onRegistrationAction) {
            this.onRegistrationAction = onRegistrationAction;
        }

        @Override
        public char getDelimiter() {
            return 0;
        }

        @Override
        public char getDelimiter(int index) {
            return 0;
        }

        @Override
        public int getNumberReporters() {
            return 0;
        }

        @Override
        public void register(Metric metric, String metricName, AbstractMetricGroup group) {
            onRegistrationAction.run();
            group.getAllVariables();
        }

        @Override
        public void unregister(Metric metric, String metricName, AbstractMetricGroup group) {
        }

        @Override
        public ScopeFormats getScopeFormats() {
            return null;
        }

        @Nullable
        @Override
        public String getMetricQueryServicePath() {
            return null;
        }
    }
}
/*
 * Copyright 2017 David Karnok
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package hu.akarnokd.reactive4javaflow.impl.operators;

import hu.akarnokd.reactive4javaflow.*;
import hu.akarnokd.reactive4javaflow.impl.SubscriptionHelper;

import java.util.*;
import java.util.concurrent.Flow;
import java.util.concurrent.atomic.*;

/**
 * Given sorted rail sequences (according to the provided comparator) as List
 * emit the smallest item from these parallel Lists to the Subscriber.
 * <p>
 * It expects the source to emit exactly one list (which could be empty).
 *
 * @param <T> the value type
 */
public final class ParallelSortedJoin<T> extends Folyam<T> {

    /** The parallel source emitting one sorted List per rail. */
    final ParallelFolyam<List<T>> source;

    /** Comparator the per-rail lists are assumed to be sorted by. */
    final Comparator<? super T> comparator;

    public ParallelSortedJoin(ParallelFolyam<List<T>> source, Comparator<? super T> comparator) {
        this.source = source;
        this.comparator = comparator;
    }

    @Override
    protected void subscribeActual(FolyamSubscriber<? super T> s) {
        // One coordinator with one inner subscriber per rail.
        SortedJoinSubscription<T> parent = new SortedJoinSubscription<>(s, source.parallelism(), comparator);
        s.onSubscribe(parent);

        source.subscribe(parent.subscribers);
    }

    /**
     * Coordinator merging the per-rail sorted lists into a single ordered stream.
     * <p>
     * Extends AtomicInteger: the integer value is the work-in-progress counter of the
     * drain loop (see getAndIncrement()/addAndGet() in cancel() and drain()).
     */
    static final class SortedJoinSubscription<T> extends AtomicInteger implements Flow.Subscription {
        private static final long serialVersionUID = 3481980673745556697L;

        /** The downstream consumer. */
        final FolyamSubscriber<? super T> actual;

        /** One inner subscriber per rail; handed to source.subscribe(). */
        final SortedJoinInnerSubscriber<T>[] subscribers;

        /** The list received from each rail; slots nulled out on termination/cancellation. */
        final List<T>[] lists;

        /** Per-rail read position into the corresponding list. */
        final int[] indexes;

        final Comparator<? super T> comparator;

        /** Outstanding downstream request amount. */
        final AtomicLong requested = new AtomicLong();

        volatile boolean cancelled;

        /** Number of rails that have not yet delivered their list; drain starts at 0. */
        final AtomicInteger remaining = new AtomicInteger();

        /** First error wins (compareAndSet from null); later distinct errors go to FolyamPlugins.onError. */
        final AtomicReference<Throwable> error = new AtomicReference<>();

        @SuppressWarnings("unchecked")
        SortedJoinSubscription(FolyamSubscriber<? super T> actual, int n, Comparator<? super T> comparator) {
            this.actual = actual;
            this.comparator = comparator;

            SortedJoinInnerSubscriber<T>[] s = new SortedJoinInnerSubscriber[n];

            for (int i = 0; i < n; i++) {
                s[i] = new SortedJoinInnerSubscriber<>(this, i);
            }
            this.subscribers = s;
            this.lists = new List[n];
            this.indexes = new int[n];
            // plain store is fine before subscribers are published to the source
            remaining.lazySet(n);
        }

        @Override
        public void request(long n) {
            SubscriptionHelper.addRequested(requested, n);
            // only drain once every rail has delivered its list
            if (remaining.get() == 0) {
                drain();
            }
        }

        @Override
        public void cancel() {
            if (!cancelled) {
                cancelled = true;
                cancelAll();
                // if no drain is running, clear the stored lists here;
                // otherwise the active drain observes 'cancelled' and clears them
                if (getAndIncrement() == 0) {
                    Arrays.fill(lists, null);
                }
            }
        }

        /** Cancels every rail's upstream subscription. */
        void cancelAll() {
            for (SortedJoinInnerSubscriber<T> s : subscribers) {
                s.cancel();
            }
        }

        /** Called by rail {@code index} with its (single) list. */
        void innerNext(List<T> value, int index) {
            lists[index] = value;
            // last rail to arrive triggers the drain
            if (remaining.decrementAndGet() == 0) {
                drain();
            }
        }

        void innerError(Throwable e) {
            if (error.compareAndSet(null, e)) {
                drain();
            } else {
                // a different, late error is routed to the global handler
                if (e != error.get()) {
                    FolyamPlugins.onError(e);
                }
            }
        }

        /**
         * Serialized emission loop: repeatedly picks the smallest head element across all
         * rails and emits it, honoring the requested amount, cancellation and errors.
         * Only one thread runs the loop at a time (wip counter in 'this').
         */
        void drain() {
            // enter only if no other thread is already draining
            if (getAndIncrement() != 0) {
                return;
            }

            int missed = 1;
            FolyamSubscriber<? super T> a = actual;
            List<T>[] lists = this.lists;
            int[] indexes = this.indexes;
            int n = indexes.length;

            for (;;) {

                long r = requested.get();
                long e = 0L;

                // emit while there is outstanding demand
                while (e != r) {
                    if (cancelled) {
                        Arrays.fill(lists, null);
                        return;
                    }

                    Throwable ex = error.get();
                    if (ex != null) {
                        cancelAll();
                        Arrays.fill(lists, null);
                        a.onError(ex);
                        return;
                    }

                    // find the smallest current head among all rails
                    T min = null;
                    int minIndex = -1;

                    for (int i = 0; i < n; i++) {
                        List<T> list = lists[i];
                        int index = indexes[i];

                        if (list.size() != index) {
                            if (min == null) {
                                min = list.get(index);
                                minIndex = i;
                            } else {
                                T b = list.get(index);
                                boolean smaller;
                                try {
                                    smaller = comparator.compare(min, b) > 0;
                                } catch (Throwable exc) {
                                    // a throwing comparator terminates the whole join
                                    FolyamPlugins.handleFatal(exc);
                                    cancelAll();
                                    Arrays.fill(lists, null);
                                    if (!error.compareAndSet(null, exc)) {
                                        FolyamPlugins.onError(exc);
                                    }
                                    a.onError(error.get());
                                    return;
                                }
                                if (smaller) {
                                    min = b;
                                    minIndex = i;
                                }
                            }
                        }
                    }

                    // every rail exhausted -> complete
                    if (min == null) {
                        Arrays.fill(lists, null);
                        a.onComplete();
                        return;
                    }

                    a.onNext(min);

                    // advance the rail the emitted item came from
                    indexes[minIndex]++;
                    e++;
                }

                // demand exhausted: re-check terminal conditions before parking
                if (e == r) {
                    if (cancelled) {
                        Arrays.fill(lists, null);
                        return;
                    }

                    Throwable ex = error.get();
                    if (ex != null) {
                        cancelAll();
                        Arrays.fill(lists, null);
                        a.onError(ex);
                        return;
                    }

                    boolean empty = true;

                    for (int i = 0; i < n; i++) {
                        if (indexes[i] != lists[i].size()) {
                            empty = false;
                            break;
                        }
                    }

                    if (empty) {
                        Arrays.fill(lists, null);
                        a.onComplete();
                        return;
                    }
                }

                // account the emitted amount against the outstanding request
                if (e != 0 && r != Long.MAX_VALUE) {
                    requested.addAndGet(-e);
                }

                // standard missed-work handshake on the wip counter
                int w = get();
                if (w == missed) {
                    missed = addAndGet(-missed);
                    if (missed == 0) {
                        break;
                    }
                } else {
                    missed = w;
                }
            }
        }
    }

    /**
     * Per-rail subscriber; the AtomicReference holds the rail's upstream subscription
     * (managed via SubscriptionHelper.replace/cancel).
     */
    static final class SortedJoinInnerSubscriber<T> extends AtomicReference<Flow.Subscription> implements FolyamSubscriber<List<T>> {

        private static final long serialVersionUID = 6751017204873808094L;

        final SortedJoinSubscription<T> parent;

        /** This rail's slot in the parent's lists/indexes arrays. */
        final int index;

        SortedJoinInnerSubscriber(SortedJoinSubscription<T> parent, int index) {
            this.parent = parent;
            this.index = index;
        }

        @Override
        public void onSubscribe(Flow.Subscription s) {
            // unbounded request: the rail emits exactly one List
            if (SubscriptionHelper.replace(this, s)) {
                s.request(Long.MAX_VALUE);
            }
        }

        @Override
        public void onNext(List<T> t) {
            parent.innerNext(t, index);
        }

        @Override
        public void onError(Throwable t) {
            parent.innerError(t);
        }

        @Override
        public void onComplete() {
            // ignored
        }

        void cancel() {
            SubscriptionHelper.cancel(this);
        }
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.nullif; import java.util.List; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.injection.Injection; import org.pentaho.di.core.injection.InjectionDeep; import org.pentaho.di.core.injection.InjectionSupported; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import 
org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; /* * Created on 05-aug-2003 * */ @InjectionSupported( localizationPrefix = "Injection.NullIf.", groups = { "FIELDS" } ) public class NullIfMeta extends BaseStepMeta implements StepMetaInterface { private static Class<?> PKG = NullIfMeta.class; // for i18n purposes, needed by Translator2!! public static class Field implements Cloneable { @Injection( name = "FIELDNAME", group = "FIELDS" ) private String fieldName; @Injection( name = "FIELDVALUE", group = "FIELDS" ) private String fieldValue; /** * @return Returns the fieldName. */ public String getFieldName() { return fieldName; } /** * @param fieldName * The fieldName to set. */ public void setFieldName( String fieldName ) { this.fieldName = fieldName; } /** * @return Returns the fieldValue. */ public String getFieldValue() { return fieldValue; } /** * @param fieldValue * The fieldValue to set. */ public void setFieldValue( String fieldValue ) { this.fieldValue = fieldValue; } public Field clone() { try { return (Field) super.clone(); } catch ( CloneNotSupportedException e ) { throw new RuntimeException( e ); } } } @InjectionDeep private Field[] fields; public NullIfMeta() { super(); // allocate BaseStepMeta } public Field[] getFields() { return fields; } public void setFields( Field[] fields ) { this.fields = fields; } public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { readData( stepnode ); } public void allocate( int count ) { fields = new Field[count]; for ( int i = 0; i < count; i++ ) { fields[i] = new Field(); } } public Object clone() { NullIfMeta retval = (NullIfMeta) super.clone(); int count = fields.length; retval.allocate( count ); for ( int i = 0; i < count; i++ ) { retval.getFields()[i] = fields[i].clone(); } return retval; } private void readData( Node stepnode ) throws KettleXMLException { try { Node fieldNodes = 
XMLHandler.getSubNode( stepnode, "fields" ); int count = XMLHandler.countNodes( fieldNodes, "field" ); allocate( count ); for ( int i = 0; i < count; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fieldNodes, "field", i ); fields[i].setFieldName( XMLHandler.getTagValue( fnode, "name" ) ); fields[i].setFieldValue( XMLHandler.getTagValue( fnode, "value" ) ); } } catch ( Exception e ) { throw new KettleXMLException( BaseMessages.getString( PKG, "NullIfMeta.Exception.UnableToReadStepInfoFromXML" ), e ); } } public void setDefault() { int count = 0; allocate( count ); for ( int i = 0; i < count; i++ ) { fields[i].setFieldName( "field" + i ); fields[i].setFieldValue( "" ); } } public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { if ( r == null ) { r = new RowMeta(); // give back values // Meta-data doesn't change here, only the value possibly turns to NULL } return; } public String getXML() { StringBuilder retval = new StringBuilder(); retval.append( " <fields>" + Const.CR ); for ( int i = 0; i < fields.length; i++ ) { retval.append( " <field>" + Const.CR ); retval.append( " " + XMLHandler.addTagValue( "name", fields[i].getFieldName() ) ); retval.append( " " + XMLHandler.addTagValue( "value", fields[i].getFieldValue() ) ); retval.append( " </field>" + Const.CR ); } retval.append( " </fields>" + Const.CR ); return retval.toString(); } public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException { try { int nrfields = rep.countNrStepAttributes( id_step, "field_name" ); allocate( nrfields ); for ( int i = 0; i < nrfields; i++ ) { fields[i].setFieldName( rep.getStepAttributeString( id_step, i, "field_name" ) ); fields[i].setFieldValue( rep.getStepAttributeString( id_step, i, "field_value" ) ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, 
"NullIfMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository" ), e ); } } public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException { try { for ( int i = 0; i < fields.length; i++ ) { rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fields[i].getFieldName() ); rep.saveStepAttribute( id_transformation, id_step, i, "field_value", fields[i].getFieldValue() ); } } catch ( Exception e ) { throw new KettleException( BaseMessages.getString( PKG, "NullIfMeta.Exception.UnableToSaveStepInfoToRepository" ) + id_step, e ); } } public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) { CheckResult cr; if ( prev == null || prev.size() == 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_WARNING, BaseMessages.getString( PKG, "NullIfMeta.CheckResult.NoReceivingFieldsError" ), stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "NullIfMeta.CheckResult.StepReceivingFieldsOK", prev.size() + "" ), stepMeta ); remarks.add( cr ); } // See if we have input streams leading to this step! 
if ( input.length > 0 ) { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG, "NullIfMeta.CheckResult.StepRecevingInfoFromOtherSteps" ), stepMeta ); remarks.add( cr ); } else { cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG, "NullIfMeta.CheckResult.NoInputReceivedError" ), stepMeta ); remarks.add( cr ); } } public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ) { return new NullIf( stepMeta, stepDataInterface, cnr, transMeta, trans ); } public StepDataInterface getStepData() { return new NullIfData(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.runtime.controlprogram.parfor; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.sysml.hops.Hop; import org.apache.sysml.parser.Expression.DataType; import org.apache.sysml.parser.Expression.ValueType; import org.apache.sysml.runtime.DMLRuntimeException; import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat; import org.apache.sysml.runtime.controlprogram.caching.MatrixObject; import org.apache.sysml.runtime.matrix.MatrixCharacteristics; import org.apache.sysml.runtime.matrix.MatrixFormatMetaData; import org.apache.sysml.runtime.matrix.data.InputInfo; import org.apache.sysml.runtime.matrix.data.MatrixBlock; import org.apache.sysml.runtime.matrix.data.OutputInfo; import org.apache.sysml.runtime.util.MapReduceTool; /** * This is the base class for all data partitioner. 
* */ public abstract class DataPartitioner { protected static final Log LOG = LogFactory.getLog(DataPartitioner.class.getName()); //note: the following value has been empirically determined but might change in the future, //MatrixBlockDSM.SPARCITY_TURN_POINT (with 0.4) was too high because we create 3-4 values per nnz and //have some computation overhead for binary cell. protected static final double SPARSITY_CELL_THRESHOLD = 0.1d; protected static final String NAME_SUFFIX = "_dp"; //instance variables protected PDataPartitionFormat _format = null; protected int _n = -1; //blocksize if applicable protected boolean _allowBinarycell = true; protected DataPartitioner( PDataPartitionFormat dpf, int n ) { _format = dpf; _n = n; } public MatrixObject createPartitionedMatrixObject( MatrixObject in, String fnameNew ) throws DMLRuntimeException { return createPartitionedMatrixObject(in, fnameNew, false); } public MatrixObject createPartitionedMatrixObject( MatrixObject in, String fnameNew, boolean force ) throws DMLRuntimeException { ValueType vt = in.getValueType(); String varname = in.getVarName(); MatrixObject out = new MatrixObject(vt, fnameNew ); out.setDataType( DataType.MATRIX ); out.setVarName( varname+NAME_SUFFIX ); return createPartitionedMatrixObject(in, out, force); } /** * Creates a partitioned matrix object based on the given input matrix object, * according to the specified split format. The input matrix can be in-memory * or still on HDFS and the partitioned output matrix is written to HDFS. The * created matrix object can be used transparently for obtaining the full matrix * or reading 1 or multiple partitions based on given index ranges. 
* * @param in input matrix object * @param out output matrix object * @param force if false, try to optimize * @return partitioned matrix object * @throws DMLRuntimeException if DMLRuntimeException occurs */ public MatrixObject createPartitionedMatrixObject( MatrixObject in, MatrixObject out, boolean force ) throws DMLRuntimeException { //check for naive partitioning if( _format == PDataPartitionFormat.NONE ) return in; //analyze input matrix object MatrixFormatMetaData meta = (MatrixFormatMetaData)in.getMetaData(); MatrixCharacteristics mc = meta.getMatrixCharacteristics(); InputInfo ii = meta.getInputInfo(); OutputInfo oi = meta.getOutputInfo(); long rows = mc.getRows(); long cols = mc.getCols(); int brlen = mc.getRowsPerBlock(); int bclen = mc.getColsPerBlock(); long nonZeros = mc.getNonZeros(); double sparsity = (nonZeros>=0 && rows>0 && cols>0)? ((double)nonZeros)/(rows*cols) : 1.0; if( !force ) //try to optimize, if format not forced { //check lower bound of useful data partitioning if( rows < Hop.CPThreshold && cols < Hop.CPThreshold ) //or matrix already fits in mem { return in; } //check for changing to blockwise representations if( _format == PDataPartitionFormat.ROW_WISE && cols < Hop.CPThreshold ) { LOG.debug("Changing format from "+PDataPartitionFormat.ROW_WISE+" to "+PDataPartitionFormat.ROW_BLOCK_WISE+"."); _format = PDataPartitionFormat.ROW_BLOCK_WISE; } if( _format == PDataPartitionFormat.COLUMN_WISE && rows < Hop.CPThreshold ) { LOG.debug("Changing format from "+PDataPartitionFormat.COLUMN_WISE+" to "+PDataPartitionFormat.ROW_BLOCK_WISE+"."); _format = PDataPartitionFormat.COLUMN_BLOCK_WISE; } //_format = PDataPartitionFormat.ROW_BLOCK_WISE_N; } //check changing to binarycell in case of sparse cols (robustness) boolean convertBlock2Cell = false; if( ii == InputInfo.BinaryBlockInputInfo && _allowBinarycell && _format == PDataPartitionFormat.COLUMN_WISE && sparsity < SPARSITY_CELL_THRESHOLD ) { LOG.debug("Changing partition outputinfo from 
binaryblock to binarycell due to sparsity="+sparsity); oi = OutputInfo.BinaryCellOutputInfo; convertBlock2Cell = true; } //prepare filenames and cleanup if required String fnameNew = out.getFileName(); try{ MapReduceTool.deleteFileIfExistOnHDFS(fnameNew); } catch(Exception ex){ throw new DMLRuntimeException( ex ); } //core partitioning (depending on subclass) partitionMatrix( in, fnameNew, ii, oi, rows, cols, brlen, bclen ); //create output matrix object out.setPartitioned( _format, _n ); MatrixCharacteristics mcNew = new MatrixCharacteristics( rows, cols, (int)brlen, (int)bclen ); mcNew.setNonZeros( nonZeros ); if( convertBlock2Cell ) ii = InputInfo.BinaryCellInputInfo; MatrixFormatMetaData metaNew = new MatrixFormatMetaData(mcNew,oi,ii); out.setMetaData(metaNew); return out; } public void disableBinaryCell() { _allowBinarycell = false; } protected abstract void partitionMatrix( MatrixObject in, String fnameNew, InputInfo ii, OutputInfo oi, long rlen, long clen, int brlen, int bclen ) throws DMLRuntimeException; public static MatrixBlock createReuseMatrixBlock( PDataPartitionFormat dpf, int rows, int cols ) { MatrixBlock tmp = null; switch( dpf ) { case ROW_WISE: //default assumption sparse, but reset per input block anyway tmp = new MatrixBlock( 1, (int)cols, true, (int)(cols*0.1) ); break; case COLUMN_WISE: //default dense because single column alwyas below SKINNY_MATRIX_TURN_POINT tmp = new MatrixBlock( (int)rows, 1, false ); break; default: //do nothing } return tmp; } }
package com.xem.mzbcustomerapp.view;

import android.content.Context;
import android.database.DataSetObservable;
import android.database.DataSetObserver;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.FrameLayout;
import android.widget.GridView;
import android.widget.ListAdapter;
import android.widget.WrapperListAdapter;

import java.util.ArrayList;

/**
 * A {@link GridView} that supports adding fixed header views above the grid
 * content, analogous to ListView's header support. Headers span the full
 * width; placeholder cells are synthesized so each header occupies an entire
 * row regardless of the column count.
 *
 * Created by xuebing on 15/11/9.
 */
public class HeaderGridView extends GridView {
    private static final String TAG = "HeaderGridView";

    /**
     * A class that represents a fixed view in a list, for example a header at the top
     * or a footer at the bottom.
     */
    private static class FixedViewInfo {
        /** The view to add to the grid */
        public View view;
        public ViewGroup viewContainer;
        /** The data backing the view. This is returned from {@link ListAdapter#getItem(int)}. */
        public Object data;
        /** <code>true</code> if the fixed view should be selectable in the grid */
        public boolean isSelectable;
    }

    private ArrayList<FixedViewInfo> mHeaderViewInfos = new ArrayList<FixedViewInfo>();

    private void initHeaderGridView() {
        // Full-width header containers must be able to draw outside cell bounds.
        super.setClipChildren(false);
    }

    public HeaderGridView(Context context) {
        super(context);
        initHeaderGridView();
    }

    public HeaderGridView(Context context, AttributeSet attrs) {
        super(context, attrs);
        initHeaderGridView();
    }

    public HeaderGridView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initHeaderGridView();
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Propagate the (possibly just-resolved) column count to the wrapper
        // adapter so it can size its placeholder rows.
        ListAdapter adapter = getAdapter();
        if (adapter != null && adapter instanceof HeaderViewGridAdapter) {
            ((HeaderViewGridAdapter) adapter).setNumColumns(getNumColumns());
        }
    }

    @Override
    public void setClipChildren(boolean clipChildren) {
        // Ignore, since the header rows depend on not being clipped
    }

    /**
     * Add a fixed view to appear at the top of the grid. If addHeaderView is
     * called more than once, the views will appear in the order they were
     * added. Views added using this call can take focus if they want.
     * <p>
     * NOTE: Call this before calling setAdapter. This is so HeaderGridView can wrap
     * the supplied cursor with one that will also account for header views.
     *
     * @param v The view to add.
     * @param data Data to associate with this view
     * @param isSelectable whether the item is selectable
     */
    public void addHeaderView(View v, Object data, boolean isSelectable) {
        ListAdapter adapter = getAdapter();

        if (adapter != null && ! (adapter instanceof HeaderViewGridAdapter)) {
            throw new IllegalStateException(
                    "Cannot add header view to grid -- setAdapter has already been called.");
        }

        FixedViewInfo info = new FixedViewInfo();
        FrameLayout fl = new FullWidthFixedViewLayout(getContext());
        fl.addView(v);
        info.view = v;
        info.viewContainer = fl;
        info.data = data;
        info.isSelectable = isSelectable;
        mHeaderViewInfos.add(info);

        // in the case of re-adding a header view, or adding one later on,
        // we need to notify the observer
        if (adapter != null) {
            ((HeaderViewGridAdapter) adapter).notifyDataSetChanged();
        }
    }

    /**
     * Add a fixed view to appear at the top of the grid. If addHeaderView is
     * called more than once, the views will appear in the order they were
     * added. Views added using this call can take focus if they want.
     * <p>
     * NOTE: Call this before calling setAdapter. This is so HeaderGridView can wrap
     * the supplied cursor with one that will also account for header views.
     *
     * @param v The view to add.
     */
    public void addHeaderView(View v) {
        addHeaderView(v, null, true);
    }

    public int getHeaderViewCount() {
        return mHeaderViewInfos.size();
    }

    /**
     * Removes a previously-added header view.
     *
     * @param v The view to remove
     * @return true if the view was removed, false if the view was not a header
     *         view
     */
    public boolean removeHeaderView(View v) {
        if (mHeaderViewInfos.size() > 0) {
            boolean result = false;
            ListAdapter adapter = getAdapter();
            // FIX: guard the cast — the installed adapter is only a
            // HeaderViewGridAdapter when headers existed at setAdapter() time;
            // an unguarded cast could throw ClassCastException.
            if (adapter instanceof HeaderViewGridAdapter
                    && ((HeaderViewGridAdapter) adapter).removeHeader(v)) {
                result = true;
            }
            removeFixedViewInfo(v, mHeaderViewInfos);
            return result;
        }
        return false;
    }

    private void removeFixedViewInfo(View v, ArrayList<FixedViewInfo> where) {
        int len = where.size();
        for (int i = 0; i < len; ++i) {
            FixedViewInfo info = where.get(i);
            if (info.view == v) {
                where.remove(i);
                break;
            }
        }
    }

    @Override
    public void setAdapter(ListAdapter adapter) {
        if (mHeaderViewInfos.size() > 0) {
            HeaderViewGridAdapter hadapter = new HeaderViewGridAdapter(mHeaderViewInfos, adapter);
            int numColumns = getNumColumns();
            if (numColumns > 1) {
                hadapter.setNumColumns(numColumns);
            }
            super.setAdapter(hadapter);
        } else {
            super.setAdapter(adapter);
        }
    }

    /**
     * Container that forces a header view to span the grid's full content
     * width (minus padding) when measured.
     */
    private class FullWidthFixedViewLayout extends FrameLayout {
        public FullWidthFixedViewLayout(Context context) {
            super(context);
        }

        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            int targetWidth = HeaderGridView.this.getMeasuredWidth()
                    - HeaderGridView.this.getPaddingLeft()
                    - HeaderGridView.this.getPaddingRight();
            widthMeasureSpec = MeasureSpec.makeMeasureSpec(targetWidth,
                    MeasureSpec.getMode(widthMeasureSpec));
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
    }

    /**
     * ListAdapter used when a HeaderGridView has header views. This ListAdapter
     * wraps another one and also keeps track of the header views and their
     * associated data objects.
     *<p>This is intended as a base class; you will probably not need to
     * use this class directly in your own code.
     */
    private static class HeaderViewGridAdapter implements WrapperListAdapter, Filterable {
        // This is used to notify the container of updates relating to number of columns
        // or headers changing, which changes the number of placeholders needed
        private final DataSetObservable mDataSetObservable = new DataSetObservable();
        private final ListAdapter mAdapter;
        private int mNumColumns = 1;
        // This ArrayList is assumed to NOT be null.
        ArrayList<FixedViewInfo> mHeaderViewInfos;
        boolean mAreAllFixedViewsSelectable;
        private final boolean mIsFilterable;

        public HeaderViewGridAdapter(ArrayList<FixedViewInfo> headerViewInfos, ListAdapter adapter) {
            mAdapter = adapter;
            mIsFilterable = adapter instanceof Filterable;
            if (headerViewInfos == null) {
                throw new IllegalArgumentException("headerViewInfos cannot be null");
            }
            mHeaderViewInfos = headerViewInfos;
            mAreAllFixedViewsSelectable = areAllListInfosSelectable(mHeaderViewInfos);
        }

        public int getHeadersCount() {
            return mHeaderViewInfos.size();
        }

        @Override
        public boolean isEmpty() {
            return (mAdapter == null || mAdapter.isEmpty()) && getHeadersCount() == 0;
        }

        public void setNumColumns(int numColumns) {
            if (numColumns < 1) {
                throw new IllegalArgumentException("Number of columns must be 1 or more");
            }
            if (mNumColumns != numColumns) {
                mNumColumns = numColumns;
                notifyDataSetChanged();
            }
        }

        private boolean areAllListInfosSelectable(ArrayList<FixedViewInfo> infos) {
            if (infos != null) {
                for (FixedViewInfo info : infos) {
                    if (!info.isSelectable) {
                        return false;
                    }
                }
            }
            return true;
        }

        public boolean removeHeader(View v) {
            for (int i = 0; i < mHeaderViewInfos.size(); i++) {
                FixedViewInfo info = mHeaderViewInfos.get(i);
                if (info.view == v) {
                    mHeaderViewInfos.remove(i);
                    mAreAllFixedViewsSelectable = areAllListInfosSelectable(mHeaderViewInfos);
                    mDataSetObservable.notifyChanged();
                    return true;
                }
            }
            return false;
        }

        @Override
        public int getCount() {
            // Each header occupies a full row: one real cell + (columns-1) placeholders.
            if (mAdapter != null) {
                return getHeadersCount() * mNumColumns + mAdapter.getCount();
            } else {
                return getHeadersCount() * mNumColumns;
            }
        }

        @Override
        public boolean areAllItemsEnabled() {
            if (mAdapter != null) {
                return mAreAllFixedViewsSelectable && mAdapter.areAllItemsEnabled();
            } else {
                return true;
            }
        }

        @Override
        public boolean isEnabled(int position) {
            // Header (negative positions will throw an ArrayIndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                return (position % mNumColumns == 0)
                        && mHeaderViewInfos.get(position / mNumColumns).isSelectable;
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.isEnabled(adjPosition);
                }
            }
            throw new ArrayIndexOutOfBoundsException(position);
        }

        @Override
        public Object getItem(int position) {
            // Header (negative positions will throw an ArrayIndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                if (position % mNumColumns == 0) {
                    return mHeaderViewInfos.get(position / mNumColumns).data;
                }
                return null;
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.getItem(adjPosition);
                }
            }
            throw new ArrayIndexOutOfBoundsException(position);
        }

        @Override
        public long getItemId(int position) {
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (mAdapter != null && position >= numHeadersAndPlaceholders) {
                int adjPosition = position - numHeadersAndPlaceholders;
                int adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.getItemId(adjPosition);
                }
            }
            return -1;
        }

        @Override
        public boolean hasStableIds() {
            if (mAdapter != null) {
                return mAdapter.hasStableIds();
            }
            return false;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            // Header (negative positions will throw an ArrayIndexOutOfBoundsException)
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders) {
                View headerViewContainer = mHeaderViewInfos
                        .get(position / mNumColumns).viewContainer;
                if (position % mNumColumns == 0) {
                    return headerViewContainer;
                } else {
                    if (convertView == null) {
                        convertView = new View(parent.getContext());
                    }
                    // We need to do this because GridView uses the height of the last item
                    // in a row to determine the height for the entire row.
                    convertView.setVisibility(View.INVISIBLE);
                    convertView.setMinimumHeight(headerViewContainer.getHeight());
                    return convertView;
                }
            }
            // Adapter
            final int adjPosition = position - numHeadersAndPlaceholders;
            int adapterCount = 0;
            if (mAdapter != null) {
                adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.getView(adjPosition, convertView, parent);
                }
            }
            throw new ArrayIndexOutOfBoundsException(position);
        }

        @Override
        public int getItemViewType(int position) {
            int numHeadersAndPlaceholders = getHeadersCount() * mNumColumns;
            if (position < numHeadersAndPlaceholders && (position % mNumColumns != 0)) {
                // Placeholders get the last view type number
                return mAdapter != null ? mAdapter.getViewTypeCount() : 1;
            }
            if (mAdapter != null && position >= numHeadersAndPlaceholders) {
                int adjPosition = position - numHeadersAndPlaceholders;
                int adapterCount = mAdapter.getCount();
                if (adjPosition < adapterCount) {
                    return mAdapter.getItemViewType(adjPosition);
                }
            }
            return AdapterView.ITEM_VIEW_TYPE_HEADER_OR_FOOTER;
        }

        @Override
        public int getViewTypeCount() {
            // One extra type for the invisible placeholder cells.
            if (mAdapter != null) {
                return mAdapter.getViewTypeCount() + 1;
            }
            return 2;
        }

        @Override
        public void registerDataSetObserver(DataSetObserver observer) {
            mDataSetObservable.registerObserver(observer);
            if (mAdapter != null) {
                mAdapter.registerDataSetObserver(observer);
            }
        }

        @Override
        public void unregisterDataSetObserver(DataSetObserver observer) {
            mDataSetObservable.unregisterObserver(observer);
            if (mAdapter != null) {
                mAdapter.unregisterDataSetObserver(observer);
            }
        }

        @Override
        public Filter getFilter() {
            if (mIsFilterable) {
                return ((Filterable) mAdapter).getFilter();
            }
            return null;
        }

        @Override
        public ListAdapter getWrappedAdapter() {
            return mAdapter;
        }

        public void notifyDataSetChanged() {
            mDataSetObservable.notifyChanged();
        }
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v8.services.stub;

import com.google.ads.googleads.v8.resources.FeedItem;
import com.google.ads.googleads.v8.services.GetFeedItemRequest;
import com.google.ads.googleads.v8.services.MutateFeedItemsRequest;
import com.google.ads.googleads.v8.services.MutateFeedItemsResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
// NOTE(review): this file is produced by gapic-generator-java; do not hand-edit
// code here — regeneration would discard the changes. Comments only.
/**
 * Settings class to configure an instance of {@link FeedItemServiceStub}.
 *
 * <p>The default instance has everything set to sensible defaults:
 *
 * <ul>
 *   <li> The default service address (googleads.googleapis.com) and default port (443) are used.
 *   <li> Credentials are acquired automatically through Application Default Credentials.
 *   <li> Retries are configured for idempotent methods but not for non-idempotent methods.
 * </ul>
 *
 * <p>The builder of this class is recursive, so contained classes are themselves builders. When
 * build() is called, the tree of builders is called to create the complete settings object.
 *
 * <p>For example, to set the total timeout of getFeedItem to 30 seconds:
 *
 * <pre>{@code
 * FeedItemServiceStubSettings.Builder feedItemServiceSettingsBuilder =
 *     FeedItemServiceStubSettings.newBuilder();
 * feedItemServiceSettingsBuilder
 *     .getFeedItemSettings()
 *     .setRetrySettings(
 *         feedItemServiceSettingsBuilder
 *             .getFeedItemSettings()
 *             .getRetrySettings()
 *             .toBuilder()
 *             .setTotalTimeout(Duration.ofSeconds(30))
 *             .build());
 * FeedItemServiceStubSettings feedItemServiceSettings = feedItemServiceSettingsBuilder.build();
 * }</pre>
 */
@Generated("by gapic-generator-java")
public class FeedItemServiceStubSettings extends StubSettings<FeedItemServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/adwords").build();

  // Per-RPC call settings (timeouts, retry codes, retry params).
  private final UnaryCallSettings<GetFeedItemRequest, FeedItem> getFeedItemSettings;
  private final UnaryCallSettings<MutateFeedItemsRequest, MutateFeedItemsResponse>
      mutateFeedItemsSettings;

  /** Returns the object with the settings used for calls to getFeedItem. */
  public UnaryCallSettings<GetFeedItemRequest, FeedItem> getFeedItemSettings() {
    return getFeedItemSettings;
  }

  /** Returns the object with the settings used for calls to mutateFeedItems. */
  public UnaryCallSettings<MutateFeedItemsRequest, MutateFeedItemsResponse>
      mutateFeedItemsSettings() {
    return mutateFeedItemsSettings;
  }

  /**
   * Creates the transport-specific stub for these settings. Only the gRPC
   * transport is supported; any other channel provider is rejected.
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public FeedItemServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcFeedItemServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "googleads.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "googleads.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(FeedItemServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  protected FeedItemServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    getFeedItemSettings = settingsBuilder.getFeedItemSettings().build();
    mutateFeedItemsSettings = settingsBuilder.mutateFeedItemsSettings().build();
  }

  /** Builder for FeedItemServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<FeedItemServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetFeedItemRequest, FeedItem> getFeedItemSettings;
    private final UnaryCallSettings.Builder<MutateFeedItemsRequest, MutateFeedItemsResponse>
        mutateFeedItemsSettings;
    // Named retry policies shared by initDefaults(); keys match the service config.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(5000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getFeedItemSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      mutateFeedItemsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getFeedItemSettings, mutateFeedItemsSettings);
      initDefaults(this);
    }

    protected Builder(FeedItemServiceStubSettings settings) {
      super(settings);

      getFeedItemSettings = settings.getFeedItemSettings.toBuilder();
      mutateFeedItemsSettings = settings.mutateFeedItemsSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getFeedItemSettings, mutateFeedItemsSettings);
    }

    // Builds a Builder wired with the default transport, credentials, headers,
    // and endpoints, then applies the per-method retry defaults.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the named retry code/param definitions to every RPC's settings.
    private static Builder initDefaults(Builder builder) {
      builder
          .getFeedItemSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .mutateFeedItemsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getFeedItem. */
    public UnaryCallSettings.Builder<GetFeedItemRequest, FeedItem> getFeedItemSettings() {
      return getFeedItemSettings;
    }

    /** Returns the builder for the settings used for calls to mutateFeedItems. */
    public UnaryCallSettings.Builder<MutateFeedItemsRequest, MutateFeedItemsResponse>
        mutateFeedItemsSettings() {
      return mutateFeedItemsSettings;
    }

    @Override
    public FeedItemServiceStubSettings build() throws IOException {
      return new FeedItemServiceStubSettings(this);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.myfaces.shared.renderkit;

import java.io.IOException;
import java.io.StringWriter;

import javax.el.ELContext;
import javax.faces.application.Application;
import javax.faces.application.ProjectStage;
import javax.faces.application.Resource;
import javax.faces.application.ResourceHandler;
import javax.faces.component.UIComponent;
import javax.faces.component.UIInput;
import javax.faces.component.UIOutput;
import javax.faces.component.UIPanel;
import javax.faces.component.html.HtmlGraphicImage;
import javax.faces.context.FacesContext;

import junit.framework.Assert;

import org.apache.myfaces.shared.renderkit.html.HTML;
import org.apache.myfaces.test.base.AbstractJsfTestCase;
import org.apache.myfaces.test.mock.MockResponseWriter;
import org.easymock.classextension.EasyMock;
import org.junit.Test;

/**
 * Tests for {@code RendererUtils}: icon-src resolution through the JSF 2
 * {@link ResourceHandler}, string/converted submitted-value handling, and
 * child rendering / rendered-flag evaluation with the correct component on
 * the EL component stack.
 *
 * <p>Mixes JUnit 3 style (extends {@link AbstractJsfTestCase}, {@code test*}
 * method names) with a few {@link Test}-annotated methods; collaborator
 * objects are EasyMock nice mocks trained per test.</p>
 */
public class RendererUtilsTest extends AbstractJsfTestCase {

    // Response writer installed on the mock FacesContext in setUp().
    private MockResponseWriter writer;

    /**
     * ResourceHandler nice easy mock
     */
    ResourceHandler resourceHandlerMock;

    /**
     * {@link Application} nice easy mock
     */
    Application applicationMock;

    /**
     * A {@link Resource} nice easy mock
     */
    private Resource resourceMock;

    // Resource coordinates used to train the mocks and drive the component.
    String libraryName = "images";

    String resourceName = "picture.gif";

    // Request path the mocked Resource reports; tests assert it is passed
    // through unchanged by RendererUtils.getIconSrc().
    String requestPath = "/somePrefix/faces/javax.faces.resource/picture.gif?ln=\"images\"";

    // a Component instance:
    HtmlGraphicImage graphicImage = new HtmlGraphicImage();

    // Panel used as the parent on the EL component stack in the renderChild /
    // isRendered tests (see _setUpComponentStack()).
    private UIPanel parent;

    public RendererUtilsTest(String name) {
        super(name);
    }

    /**
     * Installs a mock response writer, replaces the Application and
     * ResourceHandler with nice mocks (Application is replayed here; the
     * ResourceHandler mock is trained per test), and configures the graphic
     * image component with library/name attributes.
     */
    protected void setUp() throws Exception {
        super.setUp();

        writer = new MockResponseWriter(new StringWriter(), null, null);
        facesContext.setResponseWriter(writer);

        applicationMock = EasyMock.createNiceMock(Application.class);
        facesContext.setApplication(applicationMock);

        resourceHandlerMock = EasyMock.createNiceMock(ResourceHandler.class);
        applicationMock.getResourceHandler();
        EasyMock.expectLastCall().andReturn(resourceHandlerMock);

        applicationMock.getProjectStage();
        // Development stage so RendererUtils adds FacesMessages on missing
        // resources (see testGetIconSrcResourceNotFound).
        EasyMock.expectLastCall().andReturn(ProjectStage.Development);

        resourceMock = EasyMock.createNiceMock(Resource.class);

        EasyMock.replay(applicationMock);

        graphicImage.getAttributes().put(JSFAttr.LIBRARY_ATTR, libraryName);
        graphicImage.getAttributes().put(JSFAttr.NAME_ATTR, resourceName);
        graphicImage.setId("graphicImageId");

        parent = new UIPanel();
    }

    protected void tearDown() throws Exception {
        super.tearDown();
    }

    /**
     * When name/library attributes are present, the icon source must be the
     * request path obtained from the ResourceHandler-created Resource.
     */
    public void testGetIconSrc() {

        // Training a mock:
        resourceHandlerMock.createResource(resourceName, libraryName);
        EasyMock.expectLastCall().andReturn(resourceMock);
        resourceMock.getRequestPath();
        EasyMock.expectLastCall().andReturn(requestPath);
        EasyMock.replay(resourceHandlerMock);
        EasyMock.replay(resourceMock);

        // Tested method :
        String iconSrc = RendererUtils.getIconSrc(facesContext, graphicImage,
                HTML.IMG_ELEM);

        assertEquals(
                "If name or name/library present, source must be obtained from ResourceHandler",
                requestPath, iconSrc);
    }

    /**
     * When the ResourceHandler cannot create the resource, getIconSrc must
     * return the "RES_NOT_FOUND" marker and queue a FacesMessage for the
     * component (Development project stage).
     */
    public void testGetIconSrcResourceNotFound() throws Exception {
        // Training a mock:
        EasyMock.reset(resourceHandlerMock);
        resourceHandlerMock.createResource(resourceName, libraryName);
        EasyMock.expectLastCall().andReturn(null);
        EasyMock.replay(resourceHandlerMock);

        // Tested method :
        String iconSrc = RendererUtils.getIconSrc(facesContext, graphicImage,
                HTML.IMG_ELEM);

        assertEquals("RES_NOT_FOUND", iconSrc);
        assertTrue("If resource is not found, a Message must be added",
                facesContext.getMessages(
                        graphicImage.getClientId(facesContext)).hasNext());
    }

    /**
     * A non-String submitted value must be rendered via its toString().
     */
    public void testGetStringValue() {
        // Test for situation where submittedValue IS NOT String:
        UIInput uiInput = new UIInput();
        Object submittedValue = new Object();
        uiInput.setSubmittedValue(submittedValue);

        String stringValue = RendererUtils
                .getStringValue(facesContext, uiInput);

        assertNotNull(stringValue);
        assertEquals("If submittedvalue is not String, toString() must be used",
                submittedValue.toString(), stringValue);
    }

    /**
     * getConvertedUIOutputValue must fall back to toString() for a
     * non-String submitted value (here a StringBuilder).
     */
    public void testGetConvertedUIOutputValue() {
        UIInput uiInput = new UIInput();
        StringBuilder submittedValue = new StringBuilder("submittedValue");
        uiInput.setSubmittedValue(submittedValue);

        Object convertedUIOutputValue = RendererUtils
                .getConvertedUIOutputValue(facesContext, uiInput,
                        submittedValue);

        assertEquals("If submittedvalue is not String, toString() must be used",
                submittedValue.toString(), convertedUIOutputValue);
    }

    /**
     * test for MYFACES-3126
     */
    @Test
    public void testRenderChild() throws IOException {
        UIInput uiInput = _setUpComponentStack();

        RendererUtils.renderChild(facesContext, uiInput);

        // renderChild must push/pop correctly so the parent stays current.
        assertEquals("Invocation must not change the current component",
                parent, UIComponent.getCurrentComponent(facesContext));
    }

    /**
     * Test that no method encode* are called if component is not rendered
     */
    @Test
    public void testRenderChild2() throws IOException {
        // MockComponent fails the test if any encode* method is invoked.
        MockComponent component = new MockComponent();

        RendererUtils.renderChild(facesContext, component);
    }

    /**
     * isRendered must honor both the plain rendered flag and a "rendered"
     * ValueExpression, and must evaluate that expression with the component
     * itself current on the EL stack — without leaving it there afterwards.
     */
    @Test
    public void testIsRendered() {
        UIComponent uiComponent = new UIOutput();
        boolean rendered = RendererUtils.isRendered(facesContext, uiComponent);
        assertTrue(rendered);

        uiComponent.setRendered(false);
        rendered = RendererUtils.isRendered(facesContext, uiComponent);
        assertFalse(rendered);

        // Component with a "rendered" ValueExpression that verifies the stack
        // and evaluates to false (see MockRenderedValueExpression).
        uiComponent = _setUpComponentStack();
        rendered = RendererUtils.isRendered(facesContext, uiComponent);
        assertFalse(rendered);
        assertEquals("isRendered must not change current component", parent,
                UIComponent.getCurrentComponent(facesContext));
    }

    /**
     * Verifies the current component on stack
     */
    private class MockRenderedValueExpression extends
            org.apache.myfaces.test.el.MockValueExpression {

        // Component expected to be current on the EL stack when this
        // expression is evaluated.
        private final UIComponent toVerify;

        public MockRenderedValueExpression(String expression,
                Class<?> expectedType, UIComponent toVerify) {
            super(expression, expectedType);
            this.toVerify = toVerify;
        }

        @Override
        public Object getValue(ELContext elContext) {
            UIComponent currentComponent = UIComponent
                    .getCurrentComponent(facesContext);
            Assert.assertEquals(
                    "If this VE is called, component on stack must be actual",
                    currentComponent, toVerify);
            // Always evaluates to "not rendered" so callers can assert false.
            return false;
        }
    }

    /** Verifies no call to encode* methods */
    private class MockComponent extends UIOutput {

        @Override
        public boolean isRendered() {
            return false;
        }

        @Override
        public void encodeBegin(FacesContext context) throws IOException {
            fail();
        }

        @Override
        public void encodeChildren(FacesContext context) throws IOException {
            fail();
        }

        @Override
        public void encodeEnd(FacesContext context) throws IOException {
            fail();
        }
    }

    /**
     * Creates a UIInput child of {@link #parent} whose "rendered" attribute
     * is a {@link MockRenderedValueExpression}, and pushes the parent onto
     * the EL component stack to mimic a parent encoding its children.
     *
     * @return the configured child input
     */
    private UIInput _setUpComponentStack() {
        UIInput uiInput = new UIInput();
        parent.getChildren().add(uiInput);

        uiInput.setId("testId");
        MockRenderedValueExpression ve = new MockRenderedValueExpression(
                "#{component.id eq 'testId'}", Boolean.class, uiInput);
        uiInput.setValueExpression("rendered", ve);

        // simulate that parent panel encodes children and is on the stack:
        parent.pushComponentToEL(facesContext, null);
        return uiInput;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.jobmaster; import org.apache.flink.api.common.ExecutionConfig; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.JobStatus; import org.apache.flink.api.common.functions.AggregateFunction; import org.apache.flink.api.common.io.DefaultInputSplitAssigner; import org.apache.flink.api.common.restartstrategy.RestartStrategies; import org.apache.flink.api.common.time.Deadline; import org.apache.flink.api.common.time.Time; import org.apache.flink.api.java.ClosureCleaner; import org.apache.flink.api.java.tuple.Tuple3; import org.apache.flink.configuration.BlobServerOptions; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.configuration.RestartStrategyOptions; import org.apache.flink.core.execution.SavepointFormatType; import org.apache.flink.core.io.InputSplit; import org.apache.flink.core.io.InputSplitAssigner; import org.apache.flink.core.io.InputSplitSource; import org.apache.flink.core.testutils.OneShotLatch; import org.apache.flink.runtime.checkpoint.CheckpointProperties; import org.apache.flink.runtime.checkpoint.CheckpointRecoveryFactory; import 
org.apache.flink.runtime.checkpoint.CheckpointRetentionPolicy; import org.apache.flink.runtime.checkpoint.CheckpointsCleaner; import org.apache.flink.runtime.checkpoint.CompletedCheckpoint; import org.apache.flink.runtime.checkpoint.PerJobCheckpointRecoveryFactory; import org.apache.flink.runtime.checkpoint.StandaloneCheckpointRecoveryFactory; import org.apache.flink.runtime.checkpoint.StandaloneCompletedCheckpointStore; import org.apache.flink.runtime.clusterframework.types.AllocationID; import org.apache.flink.runtime.clusterframework.types.ResourceID; import org.apache.flink.runtime.clusterframework.types.ResourceProfile; import org.apache.flink.runtime.concurrent.ComponentMainThreadExecutor; import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor; import org.apache.flink.runtime.deployment.TaskDeploymentDescriptor; import org.apache.flink.runtime.execution.ExecutionState; import org.apache.flink.runtime.executiongraph.AccessExecution; import org.apache.flink.runtime.executiongraph.AccessExecutionVertex; import org.apache.flink.runtime.executiongraph.ArchivedExecutionGraph; import org.apache.flink.runtime.executiongraph.ExecutionAttemptID; import org.apache.flink.runtime.executiongraph.failover.flip1.FailoverStrategyFactoryLoader; import org.apache.flink.runtime.heartbeat.HeartbeatServices; import org.apache.flink.runtime.heartbeat.TestingHeartbeatServices; import org.apache.flink.runtime.highavailability.TestingHighAvailabilityServices; import org.apache.flink.runtime.instance.SimpleSlotContext; import org.apache.flink.runtime.io.network.partition.ResultPartitionID; import org.apache.flink.runtime.io.network.partition.ResultPartitionType; import org.apache.flink.runtime.io.network.partition.TestingJobMasterPartitionTracker; import org.apache.flink.runtime.jobgraph.DistributionPattern; import org.apache.flink.runtime.jobgraph.IntermediateDataSetID; import org.apache.flink.runtime.jobgraph.JobGraph; import 
org.apache.flink.runtime.jobgraph.JobGraphBuilder; import org.apache.flink.runtime.jobgraph.JobGraphTestUtils; import org.apache.flink.runtime.jobgraph.JobVertex; import org.apache.flink.runtime.jobgraph.JobVertexID; import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.jobmanager.PartitionProducerDisposedException; import org.apache.flink.runtime.jobmanager.slots.TaskManagerGateway; import org.apache.flink.runtime.jobmaster.slotpool.PhysicalSlot; import org.apache.flink.runtime.jobmaster.slotpool.SlotInfoWithUtilization; import org.apache.flink.runtime.jobmaster.slotpool.SlotPool; import org.apache.flink.runtime.jobmaster.slotpool.SlotPoolService; import org.apache.flink.runtime.jobmaster.slotpool.SlotPoolServiceFactory; import org.apache.flink.runtime.jobmaster.slotpool.TestingSlotPoolServiceBuilder; import org.apache.flink.runtime.jobmaster.utils.JobMasterBuilder; import org.apache.flink.runtime.leaderretrieval.SettableLeaderRetrievalService; import org.apache.flink.runtime.messages.Acknowledge; import org.apache.flink.runtime.registration.RegistrationResponse; import org.apache.flink.runtime.resourcemanager.ResourceManagerGateway; import org.apache.flink.runtime.resourcemanager.ResourceManagerId; import org.apache.flink.runtime.resourcemanager.utils.TestingResourceManagerGateway; import org.apache.flink.runtime.rpc.RpcUtils; import org.apache.flink.runtime.rpc.TestingRpcService; import org.apache.flink.runtime.rpc.exceptions.RecipientUnreachableException; import org.apache.flink.runtime.scheduler.DefaultSchedulerFactory; import org.apache.flink.runtime.scheduler.ExecutionGraphInfo; import org.apache.flink.runtime.scheduler.TestingSchedulerNG; import org.apache.flink.runtime.scheduler.TestingSchedulerNGFactory; import org.apache.flink.runtime.state.CompletedCheckpointStorageLocation; import org.apache.flink.runtime.state.StreamStateHandle; import 
org.apache.flink.runtime.taskexecutor.TaskExecutorGateway; import org.apache.flink.runtime.taskexecutor.TaskExecutorToJobManagerHeartbeatPayload; import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGateway; import org.apache.flink.runtime.taskexecutor.TestingTaskExecutorGatewayBuilder; import org.apache.flink.runtime.taskexecutor.slot.SlotOffer; import org.apache.flink.runtime.taskmanager.LocalUnresolvedTaskManagerLocation; import org.apache.flink.runtime.taskmanager.TaskExecutionState; import org.apache.flink.runtime.taskmanager.TaskManagerLocation; import org.apache.flink.runtime.taskmanager.UnresolvedTaskManagerLocation; import org.apache.flink.runtime.testtasks.NoOpInvokable; import org.apache.flink.runtime.testutils.CommonTestUtils; import org.apache.flink.runtime.util.TestingFatalErrorHandler; import org.apache.flink.testutils.TestingUtils; import org.apache.flink.testutils.junit.FailsWithAdaptiveScheduler; import org.apache.flink.util.ExceptionUtils; import org.apache.flink.util.FlinkException; import org.apache.flink.util.FlinkRuntimeException; import org.apache.flink.util.InstantiationUtil; import org.apache.flink.util.TestLogger; import org.apache.flink.util.TimeUtils; import org.apache.flink.util.concurrent.FutureUtils; import org.hamcrest.Matchers; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.io.File; import java.io.IOException; import java.time.Duration; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Queue; import java.util.UUID; 
import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.junit.Assert.fail; /** Tests for {@link JobMaster}. 
*/ public class JobMasterTest extends TestLogger { private static final TestingInputSplit[] EMPTY_TESTING_INPUT_SPLITS = new TestingInputSplit[0]; @ClassRule public static TemporaryFolder temporaryFolder = new TemporaryFolder(); private static final Time testingTimeout = Time.seconds(10L); private static final long fastHeartbeatInterval = 1L; private static final long fastHeartbeatTimeout = 10L; private static final long heartbeatInterval = 1000L; private static final long heartbeatTimeout = 5_000_000L; private static final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph(); private static TestingRpcService rpcService; private static HeartbeatServices fastHeartbeatServices; private static HeartbeatServices heartbeatServices; private Configuration configuration; private ResourceID jmResourceId; private JobMasterId jobMasterId; private TestingHighAvailabilityServices haServices; private SettableLeaderRetrievalService rmLeaderRetrievalService; private TestingFatalErrorHandler testingFatalErrorHandler; @BeforeClass public static void setupClass() { rpcService = new TestingRpcService(); fastHeartbeatServices = new HeartbeatServices(fastHeartbeatInterval, fastHeartbeatTimeout, -1); heartbeatServices = new HeartbeatServices(heartbeatInterval, heartbeatTimeout, 1); } @Before public void setup() throws IOException { configuration = new Configuration(); haServices = new TestingHighAvailabilityServices(); jobMasterId = JobMasterId.generate(); jmResourceId = ResourceID.generate(); testingFatalErrorHandler = new TestingFatalErrorHandler(); haServices.setCheckpointRecoveryFactory(new StandaloneCheckpointRecoveryFactory()); rmLeaderRetrievalService = new SettableLeaderRetrievalService(null, null); haServices.setResourceManagerLeaderRetriever(rmLeaderRetrievalService); configuration.setString( BlobServerOptions.STORAGE_DIRECTORY, temporaryFolder.newFolder().getAbsolutePath()); } @After public void teardown() throws Exception { if (testingFatalErrorHandler != null) { 
testingFatalErrorHandler.rethrowError(); } rpcService.clearGateways(); } @AfterClass public static void teardownClass() { if (rpcService != null) { rpcService.stopService(); rpcService = null; } } @Test public void testTaskManagerRegistrationTriggersHeartbeating() throws Exception { final CompletableFuture<ResourceID> heartbeatResourceIdFuture = new CompletableFuture<>(); final UnresolvedTaskManagerLocation unresolvedTaskManagerLocation = new LocalUnresolvedTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setHeartbeatJobManagerFunction( (taskManagerId, ignored) -> { heartbeatResourceIdFuture.complete(taskManagerId); return FutureUtils.completedVoidFuture(); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService) .withResourceId(jmResourceId) .withConfiguration(configuration) .withHighAvailabilityServices(haServices) .withHeartbeatServices(new HeartbeatServices(1L, 10000L)) .createJobMaster(); jobMaster.start(); try { final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); // register task manager will trigger monitor heartbeat target, schedule heartbeat // request at interval time CompletableFuture<RegistrationResponse> registrationResponse = jobMasterGateway.registerTaskManager( jobGraph.getJobID(), TaskManagerRegistrationInformation.create( taskExecutorGateway.getAddress(), unresolvedTaskManagerLocation, TestingUtils.zeroUUID()), testingTimeout); // wait for the completion of the registration registrationResponse.get(); assertThat(heartbeatResourceIdFuture.join(), anyOf(nullValue(), equalTo(jmResourceId))); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testHeartbeatTimeoutWithTaskManager() throws Exception { runHeartbeatTest( new TestingTaskExecutorGatewayBuilder() 
.setHeartbeatJobManagerFunction( (taskManagerId, ignored) -> FutureUtils.completedVoidFuture()), fastHeartbeatServices); } private void runHeartbeatTest( TestingTaskExecutorGatewayBuilder testingTaskExecutorGatewayBuilder, HeartbeatServices heartbeatServices) throws Exception { final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>(); final UnresolvedTaskManagerLocation unresolvedTaskManagerLocation = new LocalUnresolvedTaskManagerLocation(); final TestingTaskExecutorGateway taskExecutorGateway = testingTaskExecutorGatewayBuilder .setDisconnectJobManagerConsumer( (jobId, throwable) -> disconnectedJobManagerFuture.complete(jobId)) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService) .withResourceId(jmResourceId) .withConfiguration(configuration) .withHighAvailabilityServices(haServices) .withHeartbeatServices(heartbeatServices) .createJobMaster(); jobMaster.start(); try { final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); // register task manager will trigger monitor heartbeat target, schedule heartbeat // request at interval time CompletableFuture<RegistrationResponse> registrationResponse = jobMasterGateway.registerTaskManager( jobGraph.getJobID(), TaskManagerRegistrationInformation.create( taskExecutorGateway.getAddress(), unresolvedTaskManagerLocation, TestingUtils.zeroUUID()), testingTimeout); // wait for the completion of the registration registrationResponse.get(); final JobID disconnectedJobManager = disconnectedJobManagerFuture.get( testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS); assertThat(disconnectedJobManager, equalTo(jobGraph.getJobID())); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } @Test public void testTaskManagerBecomesUnreachableTriggersDisconnect() throws Exception { runHeartbeatTest( new 
TestingTaskExecutorGatewayBuilder() .setHeartbeatJobManagerFunction( (taskManagerId, ignored) -> FutureUtils.completedExceptionally( new RecipientUnreachableException( "sender", "recipient", "test heartbeat target is unreachable"))), heartbeatServices); } /** * Tests that the {@link AllocatedSlotReport} contains up to date information and not stale * information about the allocated slots on the {@link JobMaster}. * * <p>This is a probabilistic test case which only fails if executed repeatedly without the fix * for FLINK-12863. */ @Test public void testAllocatedSlotReportDoesNotContainStaleInformation() throws Exception { final CompletableFuture<Void> assertionFuture = new CompletableFuture<>(); final UnresolvedTaskManagerLocation unresolvedTaskManagerLocation = new LocalUnresolvedTaskManagerLocation(); final AtomicBoolean terminateHeartbeatVerification = new AtomicBoolean(false); final OneShotLatch hasReceivedSlotOffers = new OneShotLatch(); final TestingTaskExecutorGateway taskExecutorGateway = new TestingTaskExecutorGatewayBuilder() .setHeartbeatJobManagerFunction( (taskManagerId, allocatedSlotReport) -> { try { if (hasReceivedSlotOffers.isTriggered()) { assertThat( allocatedSlotReport.getAllocatedSlotInfos(), hasSize(1)); } else { assertThat( allocatedSlotReport.getAllocatedSlotInfos(), empty()); } } catch (AssertionError e) { assertionFuture.completeExceptionally(e); } if (terminateHeartbeatVerification.get()) { assertionFuture.complete(null); } return FutureUtils.completedVoidFuture(); }) .createTestingTaskExecutorGateway(); rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway); final JobManagerSharedServices jobManagerSharedServices = new TestingJobManagerSharedServicesBuilder().build(); final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph(); final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService) .withHeartbeatServices(new HeartbeatServices(5L, 1000L)) .withSlotPoolServiceSchedulerFactory( 
DefaultSlotPoolServiceSchedulerFactory.create( new TestingSlotPoolFactory(hasReceivedSlotOffers), new DefaultSchedulerFactory())) .createJobMaster(); jobMaster.start(); try { final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); // register task manager will trigger monitor heartbeat target, schedule heartbeat // request at interval time CompletableFuture<RegistrationResponse> registrationResponse = jobMasterGateway.registerTaskManager( jobGraph.getJobID(), TaskManagerRegistrationInformation.create( taskExecutorGateway.getAddress(), unresolvedTaskManagerLocation, TestingUtils.zeroUUID()), testingTimeout); // wait for the completion of the registration registrationResponse.get(); final SlotOffer slotOffer = new SlotOffer(new AllocationID(), 0, ResourceProfile.ANY); final CompletableFuture<Collection<SlotOffer>> slotOfferFuture = jobMasterGateway.offerSlots( unresolvedTaskManagerLocation.getResourceID(), Collections.singleton(slotOffer), testingTimeout); assertThat(slotOfferFuture.get(), containsInAnyOrder(slotOffer)); terminateHeartbeatVerification.set(true); // make sure that no assertion has been violated assertionFuture.get(); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); jobManagerSharedServices.shutdown(); } } private static final class TestingSlotPoolFactory implements SlotPoolServiceFactory { private final OneShotLatch hasReceivedSlotOffers; public TestingSlotPoolFactory(OneShotLatch hasReceivedSlotOffers) { this.hasReceivedSlotOffers = hasReceivedSlotOffers; } @Nonnull @Override public SlotPoolService createSlotPoolService(@Nonnull JobID jobId) { return new TestingSlotPool(jobId, hasReceivedSlotOffers); } } private static final class TestingSlotPool implements SlotPool, SlotPoolService { private final JobID jobId; private final OneShotLatch hasReceivedSlotOffers; private final Map<ResourceID, Collection<SlotInfo>> registeredSlots; private TestingSlotPool(JobID jobId, OneShotLatch 
hasReceivedSlotOffers) { this.jobId = jobId; this.hasReceivedSlotOffers = hasReceivedSlotOffers; this.registeredSlots = new HashMap<>(16); } @Override public void start( JobMasterId jobMasterId, String newJobManagerAddress, ComponentMainThreadExecutor jmMainThreadScheduledExecutor) {} @Override public void close() { clear(); } private void clear() { registeredSlots.clear(); } @Override public void connectToResourceManager(ResourceManagerGateway resourceManagerGateway) { throw new UnsupportedOperationException( "TestingSlotPool does not support this operation."); } @Override public void disconnectResourceManager() { throw new UnsupportedOperationException( "TestingSlotPool does not support this operation."); } @Override public boolean registerTaskManager(ResourceID resourceID) { registeredSlots.computeIfAbsent(resourceID, ignored -> new ArrayList<>(16)); return true; } @Override public boolean releaseTaskManager(ResourceID resourceId, Exception cause) { registeredSlots.remove(resourceId); return true; } @Override public Collection<SlotOffer> offerSlots( TaskManagerLocation taskManagerLocation, TaskManagerGateway taskManagerGateway, Collection<SlotOffer> offers) { hasReceivedSlotOffers.trigger(); final Collection<SlotInfo> slotInfos = Optional.ofNullable(registeredSlots.get(taskManagerLocation.getResourceID())) .orElseThrow( () -> new FlinkRuntimeException("TaskManager not registered.")); int slotIndex = slotInfos.size(); for (SlotOffer offer : offers) { slotInfos.add( new SimpleSlotContext( offer.getAllocationId(), taskManagerLocation, slotIndex, taskManagerGateway)); slotIndex++; } return offers; } @Override public Optional<ResourceID> failAllocation( @Nullable ResourceID resourceID, AllocationID allocationId, Exception cause) { throw new UnsupportedOperationException( "TestingSlotPool does not support this operation."); } @Nonnull @Override public Collection<SlotInfoWithUtilization> getAvailableSlotsInformation() { final Collection<SlotInfoWithUtilization> 
allSlotInfos = registeredSlots.values().stream() .flatMap(Collection::stream) .map(slot -> SlotInfoWithUtilization.from(slot, 0)) .collect(Collectors.toList()); return Collections.unmodifiableCollection(allSlotInfos); } @Override public Collection<SlotInfo> getAllocatedSlotsInformation() { return Collections.emptyList(); } @Override public Optional<PhysicalSlot> allocateAvailableSlot( @Nonnull SlotRequestId slotRequestId, @Nonnull AllocationID allocationID, @Nonnull ResourceProfile requirementProfile) { throw new UnsupportedOperationException( "TestingSlotPool does not support this operation."); } @Nonnull @Override public CompletableFuture<PhysicalSlot> requestNewAllocatedSlot( @Nonnull SlotRequestId slotRequestId, @Nonnull ResourceProfile resourceProfile, @Nonnull Collection<AllocationID> preferredAllocations, @Nullable Time timeout) { return new CompletableFuture<>(); } @Nonnull @Override public CompletableFuture<PhysicalSlot> requestNewAllocatedBatchSlot( @Nonnull SlotRequestId slotRequestId, @Nonnull ResourceProfile resourceProfile, @Nonnull Collection<AllocationID> preferredAllocations) { return new CompletableFuture<>(); } @Override public void disableBatchSlotRequestTimeoutCheck() { // no action and no exception is expected } @Override public AllocatedSlotReport createAllocatedSlotReport(ResourceID taskManagerId) { final Collection<SlotInfo> slotInfos = registeredSlots.getOrDefault(taskManagerId, Collections.emptyList()); final List<AllocatedSlotInfo> allocatedSlotInfos = slotInfos.stream() .map( slotInfo -> new AllocatedSlotInfo( slotInfo.getPhysicalSlotNumber(), slotInfo.getAllocationId())) .collect(Collectors.toList()); return new AllocatedSlotReport(jobId, allocatedSlotInfos); } @Override public void setIsJobRestarting(boolean isJobRestarting) {} @Override public void releaseSlot(@Nonnull SlotRequestId slotRequestId, @Nullable Throwable cause) { throw new UnsupportedOperationException( "TestingSlotPool does not support this operation."); } } @Test 
// Verifies that a heartbeat timeout towards the RM disconnects the JobManager and
// that the JobMaster subsequently retries the registration.
public void testHeartbeatTimeoutWithResourceManager() throws Exception {
    final String resourceManagerAddress = "rm";
    final ResourceManagerId resourceManagerId = ResourceManagerId.generate();
    final ResourceID rmResourceId = new ResourceID(resourceManagerAddress);

    final TestingResourceManagerGateway resourceManagerGateway =
            new TestingResourceManagerGateway(
                    resourceManagerId, rmResourceId, resourceManagerAddress, "localhost");

    final CompletableFuture<Tuple3<JobMasterId, ResourceID, JobID>>
            jobManagerRegistrationFuture = new CompletableFuture<>();
    final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>();
    // two expected registrations: the initial one and the reconnect after the timeout
    final CountDownLatch registrationAttempts = new CountDownLatch(2);

    resourceManagerGateway.setRegisterJobManagerFunction(
            (jobMasterId, resourceID, s, jobID) -> {
                jobManagerRegistrationFuture.complete(
                        Tuple3.of(jobMasterId, resourceID, jobID));
                registrationAttempts.countDown();
                return CompletableFuture.completedFuture(
                        resourceManagerGateway.getJobMasterRegistrationSuccess());
            });

    resourceManagerGateway.setDisconnectJobManagerConsumer(
            tuple -> disconnectedJobManagerFuture.complete(tuple.f0));

    rpcService.registerGateway(resourceManagerAddress, resourceManagerGateway);

    // fastHeartbeatServices makes the heartbeat time out quickly in this test
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withJobMasterId(jobMasterId)
                    .withResourceId(jmResourceId)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(fastHeartbeatServices)
                    .createJobMaster();

    jobMaster.start();

    try {
        // define a leader and see that a registration happens
        rmLeaderRetrievalService.notifyListener(
                resourceManagerAddress, resourceManagerId.toUUID());

        // register job manager success will trigger monitor heartbeat target between jm and rm
        final Tuple3<JobMasterId, ResourceID, JobID> registrationInformation =
                jobManagerRegistrationFuture.get(
                        testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS);

        assertThat(registrationInformation.f0, Matchers.equalTo(jobMasterId));
        assertThat(registrationInformation.f1, Matchers.equalTo(jmResourceId));
        assertThat(registrationInformation.f2, Matchers.equalTo(jobGraph.getJobID()));

        final JobID disconnectedJobManager =
                disconnectedJobManagerFuture.get(
                        testingTimeout.toMilliseconds(), TimeUnit.MILLISECONDS);

        // heartbeat timeout should trigger disconnect JobManager from ResourceManager
        assertThat(disconnectedJobManager, Matchers.equalTo(jobGraph.getJobID()));

        // the JobMaster should try to reconnect to the RM
        registrationAttempts.await();
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

// Verifies that heartbeat RPCs failing with RecipientUnreachableException make the
// JobMaster disconnect from the RM and attempt a re-registration.
@Test
public void testResourceManagerBecomesUnreachableTriggersDisconnect() throws Exception {
    final String resourceManagerAddress = "rm";
    final ResourceManagerId resourceManagerId = ResourceManagerId.generate();
    final ResourceID rmResourceId = new ResourceID(resourceManagerAddress);

    final TestingResourceManagerGateway resourceManagerGateway =
            new TestingResourceManagerGateway(
                    resourceManagerId, rmResourceId, resourceManagerAddress, "localhost");

    final CompletableFuture<JobID> disconnectedJobManagerFuture = new CompletableFuture<>();
    final CountDownLatch registrationAttempts = new CountDownLatch(2);
    // first registration succeeds; the reconnect attempt is left pending so the test
    // can observe it without completing the second connection
    final Queue<CompletableFuture<RegistrationResponse>> connectionResponses =
            new ArrayDeque<>(2);
    connectionResponses.add(
            CompletableFuture.completedFuture(
                    resourceManagerGateway.getJobMasterRegistrationSuccess()));
    connectionResponses.add(new CompletableFuture<>());

    resourceManagerGateway.setRegisterJobManagerFunction(
            (jobMasterId, resourceID, s, jobID) -> {
                registrationAttempts.countDown();
                return connectionResponses.poll();
            });

    resourceManagerGateway.setDisconnectJobManagerConsumer(
            tuple -> disconnectedJobManagerFuture.complete(tuple.f0));

    // every heartbeat to the RM fails as if the RM were unreachable
    resourceManagerGateway.setJobMasterHeartbeatFunction(
            ignored ->
                    FutureUtils.completedExceptionally(
                            new RecipientUnreachableException(
                                    "sender", "recipient", "resource manager is unreachable")));

    rpcService.registerGateway(resourceManagerAddress, resourceManagerGateway);

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withJobMasterId(jobMasterId)
                    .withResourceId(jmResourceId)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    jobMaster.start();

    try {
        // define a leader and see that a registration happens
        rmLeaderRetrievalService.notifyListener(
                resourceManagerAddress, resourceManagerId.toUUID());
        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        // keep poking the heartbeat until the failing responses trigger the disconnect
        CommonTestUtils.waitUntilCondition(
                () -> {
                    jobMasterGateway.heartbeatFromResourceManager(rmResourceId);
                    return disconnectedJobManagerFuture.isDone();
                },
                Deadline.fromNow(TimeUtils.toDuration(testingTimeout)),
                50L);

        // heartbeat timeout should trigger disconnect JobManager from ResourceManager
        assertThat(disconnectedJobManagerFuture.join(), equalTo(jobGraph.getJobID()));

        // the JobMaster should try to reconnect to the RM
        registrationAttempts.await();
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/**
 * Tests that a JobMaster will restore the given JobGraph from its savepoint upon initial
 * submission.
 */
@Test
public void testRestoringFromSavepoint() throws Exception {

    // create savepoint data
    final long savepointId = 42L;
    final File savepointFile = createSavepoint(savepointId);

    // set savepoint settings
    final SavepointRestoreSettings savepointRestoreSettings =
            SavepointRestoreSettings.forPath(savepointFile.getAbsolutePath(), true);
    final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings);

    // the store with capacity 1 will hold exactly the checkpoint restored from the savepoint
    final StandaloneCompletedCheckpointStore completedCheckpointStore =
            new StandaloneCompletedCheckpointStore(1);
    final CheckpointRecoveryFactory testingCheckpointRecoveryFactory =
            PerJobCheckpointRecoveryFactory.withoutCheckpointStoreRecovery(
                    maxCheckpoints -> completedCheckpointStore);
    haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory);

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withHighAvailabilityServices(haServices)
                    .createJobMaster();

    try {
        // we need to start and register the required slots to let the adaptive scheduler
        // restore from the savepoint
        jobMaster.start();

        final OneShotLatch taskSubmitLatch = new OneShotLatch();

        registerSlotsAtJobMaster(
                1,
                jobMaster.getSelfGateway(JobMasterGateway.class),
                jobGraph.getJobID(),
                new TestingTaskExecutorGatewayBuilder()
                        .setSubmitTaskConsumer(
                                (taskDeploymentDescriptor, jobMasterId) -> {
                                    taskSubmitLatch.trigger();
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .createTestingTaskExecutorGateway(),
                new LocalUnresolvedTaskManagerLocation());

        // wait until a task has submitted because this guarantees that the ExecutionGraph has
        // been created
        taskSubmitLatch.await();
        final CompletedCheckpoint savepointCheckpoint =
                completedCheckpointStore.getLatestCheckpoint();

        assertThat(savepointCheckpoint, Matchers.notNullValue());

        // the restored checkpoint must carry the savepoint's id
        assertThat(savepointCheckpoint.getCheckpointID(), is(savepointId));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/** Tests that an existing checkpoint will have precedence over a savepoint.
*/ @Test public void testCheckpointPrecedesSavepointRecovery() throws Exception { // create savepoint data final long savepointId = 42L; final File savepointFile = createSavepoint(savepointId); // set savepoint settings final SavepointRestoreSettings savepointRestoreSettings = SavepointRestoreSettings.forPath("" + savepointFile.getAbsolutePath(), true); final JobGraph jobGraph = createJobGraphWithCheckpointing(savepointRestoreSettings); final long checkpointId = 1L; final CompletedCheckpoint completedCheckpoint = new CompletedCheckpoint( jobGraph.getJobID(), checkpointId, 1L, 1L, Collections.emptyMap(), null, CheckpointProperties.forCheckpoint( CheckpointRetentionPolicy.NEVER_RETAIN_AFTER_TERMINATION), new DummyCheckpointStorageLocation()); final StandaloneCompletedCheckpointStore completedCheckpointStore = new StandaloneCompletedCheckpointStore(1); completedCheckpointStore.addCheckpointAndSubsumeOldestOne( completedCheckpoint, new CheckpointsCleaner(), () -> {}); final CheckpointRecoveryFactory testingCheckpointRecoveryFactory = PerJobCheckpointRecoveryFactory.withoutCheckpointStoreRecovery( maxCheckpoints -> completedCheckpointStore); haServices.setCheckpointRecoveryFactory(testingCheckpointRecoveryFactory); final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService).createJobMaster(); try { // starting the JobMaster should have read the savepoint final CompletedCheckpoint savepointCheckpoint = completedCheckpointStore.getLatestCheckpoint(); assertThat(savepointCheckpoint, Matchers.notNullValue()); assertThat(savepointCheckpoint.getCheckpointID(), is(checkpointId)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** Tests that we can close an unestablished ResourceManager connection. 
 */
@Test
public void testCloseUnestablishedResourceManagerConnection() throws Exception {
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .createJobMaster();

    try {
        jobMaster.start();

        final TestingResourceManagerGateway firstResourceManagerGateway =
                createAndRegisterTestingResourceManagerGateway();
        final TestingResourceManagerGateway secondResourceManagerGateway =
                createAndRegisterTestingResourceManagerGateway();

        final OneShotLatch firstJobManagerRegistration = new OneShotLatch();
        final OneShotLatch secondJobManagerRegistration = new OneShotLatch();

        firstResourceManagerGateway.setRegisterJobManagerFunction(
                (jobMasterId, resourceID, s, jobID) -> {
                    firstJobManagerRegistration.trigger();
                    return CompletableFuture.completedFuture(
                            firstResourceManagerGateway.getJobMasterRegistrationSuccess());
                });

        secondResourceManagerGateway.setRegisterJobManagerFunction(
                (jobMasterId, resourceID, s, jobID) -> {
                    secondJobManagerRegistration.trigger();
                    return CompletableFuture.completedFuture(
                            secondResourceManagerGateway.getJobMasterRegistrationSuccess());
                });

        notifyResourceManagerLeaderListeners(firstResourceManagerGateway);

        // wait until we have seen the first registration attempt
        firstJobManagerRegistration.await();

        // this should stop the connection attempts towards the first RM
        notifyResourceManagerLeaderListeners(secondResourceManagerGateway);

        // check that we start registering at the second RM
        secondJobManagerRegistration.await();
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/** Tests that we continue reconnecting to the latest known RM after a disconnection message.
 */
@Test
public void testReconnectionAfterDisconnect() throws Exception {
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withJobMasterId(jobMasterId)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    jobMaster.start();

    final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class);

    try {
        final TestingResourceManagerGateway testingResourceManagerGateway =
                createAndRegisterTestingResourceManagerGateway();
        // capacity 1: each take() below consumes exactly one registration attempt
        final BlockingQueue<JobMasterId> registrationsQueue = new ArrayBlockingQueue<>(1);

        testingResourceManagerGateway.setRegisterJobManagerFunction(
                (jobMasterId, resourceID, s, jobID) -> {
                    registrationsQueue.offer(jobMasterId);
                    return CompletableFuture.completedFuture(
                            testingResourceManagerGateway.getJobMasterRegistrationSuccess());
                });

        final ResourceManagerId resourceManagerId =
                testingResourceManagerGateway.getFencingToken();
        notifyResourceManagerLeaderListeners(testingResourceManagerGateway);

        // wait for first registration attempt
        final JobMasterId firstRegistrationAttempt = registrationsQueue.take();

        assertThat(firstRegistrationAttempt, equalTo(jobMasterId));

        // no spurious second registration before the explicit disconnect
        assertThat(registrationsQueue.isEmpty(), is(true));
        jobMasterGateway.disconnectResourceManager(
                resourceManagerId, new FlinkException("Test exception"));

        // wait for the second registration attempt after the disconnect call
        assertThat(registrationsQueue.take(), equalTo(jobMasterId));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/** Tests that the JM connects to the leading RM after regaining leadership.
 */
@Test
public void testResourceManagerConnectionAfterStart() throws Exception {
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withJobMasterId(jobMasterId)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    try {
        final TestingResourceManagerGateway testingResourceManagerGateway =
                createAndRegisterTestingResourceManagerGateway();

        final BlockingQueue<JobMasterId> registrationQueue = new ArrayBlockingQueue<>(1);
        testingResourceManagerGateway.setRegisterJobManagerFunction(
                (jobMasterId, resourceID, s, jobID) -> {
                    registrationQueue.offer(jobMasterId);
                    return CompletableFuture.completedFuture(
                            testingResourceManagerGateway.getJobMasterRegistrationSuccess());
                });

        // the RM leader is announced BEFORE the JobMaster starts; the registration
        // must still happen once the JobMaster comes up
        notifyResourceManagerLeaderListeners(testingResourceManagerGateway);

        jobMaster.start();

        final JobMasterId firstRegistrationAttempt = registrationQueue.take();

        assertThat(firstRegistrationAttempt, equalTo(jobMasterId));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/**
 * Tests that input splits assigned to an Execution will be returned to the InputSplitAssigner
 * if this execution fails.
 */
@Test
@Category(FailsWithAdaptiveScheduler.class) // FLINK-21450
public void testRequestNextInputSplitWithLocalFailover() throws Exception {

    configuration.setString(
            JobManagerOptions.EXECUTION_FAILOVER_STRATEGY,
            FailoverStrategyFactoryLoader.PIPELINED_REGION_RESTART_STRATEGY_NAME);

    // with local (region) failover only the splits of the failed subtask (index 0)
    // are expected to be handed out again
    final Function<List<List<InputSplit>>, Collection<InputSplit>>
            expectFailedExecutionInputSplits = inputSplitsPerTask -> inputSplitsPerTask.get(0);

    runRequestNextInputSplitTest(expectFailedExecutionInputSplits);
}

@Test
public void testRequestNextInputSplitWithGlobalFailover() throws Exception {
    configuration.setInteger(RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1);
    configuration.set(
            RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_DELAY, Duration.ofSeconds(0));
    configuration.setString(JobManagerOptions.EXECUTION_FAILOVER_STRATEGY, "full");

    // with global failover ALL previously assigned splits must be returned
    final Function<List<List<InputSplit>>, Collection<InputSplit>>
            expectAllRemainingInputSplits = this::flattenCollection;

    runRequestNextInputSplitTest(expectAllRemainingInputSplits);
}

// Shared driver: runs a 2x2 split job, consumes all splits, fails one execution and
// then checks that exactly the splits selected by expectedRemainingInputSplits are
// re-offered after recovery.
private void runRequestNextInputSplitTest(
        Function<List<List<InputSplit>>, Collection<InputSplit>> expectedRemainingInputSplits)
        throws Exception {
    final int parallelism = 2;
    final int splitsPerTask = 2;

    final int totalSplits = parallelism * splitsPerTask;
    final List<TestingInputSplit> allInputSplits = new ArrayList<>(totalSplits);

    for (int i = 0; i < totalSplits; i++) {
        allInputSplits.add(new TestingInputSplit(i));
    }

    final InputSplitSource<TestingInputSplit> inputSplitSource =
            new TestingInputSplitSource(allInputSplits);

    JobVertex source = new JobVertex("source");
    source.setParallelism(parallelism);
    source.setInputSplitSource(inputSplitSource);
    source.setInvokableClass(AbstractInvokable.class);

    final ExecutionConfig executionConfig = new ExecutionConfig();
    executionConfig.setRestartStrategy(RestartStrategies.fixedDelayRestart(100, 0));

    final JobGraph inputSplitJobGraph =
            JobGraphBuilder.newStreamingJobGraphBuilder()
                    .addJobVertex(source)
                    .setExecutionConfig(executionConfig)
                    .build();

    final JobMaster jobMaster =
            new JobMasterBuilder(inputSplitJobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    jobMaster.start();

    try {
        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        registerSlotsRequiredForJobExecution(
                jobMasterGateway, inputSplitJobGraph.getJobID(), parallelism);

        waitUntilAllExecutionsAreScheduledOrDeployed(jobMasterGateway);

        final JobVertexID sourceId = source.getID();

        final List<AccessExecution> executions = getExecutions(jobMasterGateway, sourceId);
        final ExecutionAttemptID initialAttemptId = executions.get(0).getAttemptId();
        final List<List<InputSplit>> inputSplitsPerTask = new ArrayList<>(parallelism);

        // request all input splits
        for (AccessExecution execution : executions) {
            inputSplitsPerTask.add(
                    getInputSplits(
                            splitsPerTask,
                            getInputSplitSupplier(
                                    sourceId, jobMasterGateway, execution.getAttemptId())));
        }

        final List<InputSplit> allRequestedInputSplits = flattenCollection(inputSplitsPerTask);
        assertThat(
                allRequestedInputSplits,
                containsInAnyOrder(allInputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS)));

        // fail the first execution to trigger a failover
        jobMasterGateway
                .updateTaskExecutionState(
                        new TaskExecutionState(initialAttemptId, ExecutionState.FAILED))
                .get();

        // wait until the job has been recovered
        waitUntilAllExecutionsAreScheduledOrDeployed(jobMasterGateway);

        final ExecutionAttemptID restartedAttemptId =
                getFirstExecution(jobMasterGateway, sourceId).getAttemptId();

        final List<InputSplit> inputSplits =
                getRemainingInputSplits(
                        getInputSplitSupplier(sourceId, jobMasterGateway, restartedAttemptId));

        assertThat(
                inputSplits,
                containsInAnyOrder(
                        expectedRemainingInputSplits
                                .apply(inputSplitsPerTask)
                                .toArray(EMPTY_TESTING_INPUT_SPLITS)));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

@Nonnull
private
List<InputSplit> flattenCollection(List<List<InputSplit>> inputSplitsPerTask) {
    return inputSplitsPerTask.stream().flatMap(Collection::stream).collect(Collectors.toList());
}

// Builds a supplier that fetches the next input split for the given attempt on demand.
@Nonnull
private Supplier<SerializedInputSplit> getInputSplitSupplier(
        JobVertexID jobVertexID,
        JobMasterGateway jobMasterGateway,
        ExecutionAttemptID initialAttemptId) {
    return () -> getInputSplit(jobMasterGateway, jobVertexID, initialAttemptId);
}

// Polls the execution graph until every execution is SCHEDULED or DEPLOYING,
// or the testing deadline expires.
private void waitUntilAllExecutionsAreScheduledOrDeployed(
        final JobMasterGateway jobMasterGateway) throws Exception {
    final Duration duration = Duration.ofMillis(testingTimeout.toMilliseconds());
    final Deadline deadline = Deadline.fromNow(duration);

    CommonTestUtils.waitUntilCondition(
            () -> {
                final Collection<AccessExecution> executions = getExecutions(jobMasterGateway);
                return !executions.isEmpty()
                        && executions.stream()
                                .allMatch(
                                        execution ->
                                                execution.getState() == ExecutionState.SCHEDULED
                                                        || execution.getState()
                                                                == ExecutionState.DEPLOYING);
            },
            deadline);
}

private static AccessExecution getFirstExecution(
        final JobMasterGateway jobMasterGateway, final JobVertexID jobVertexId) {
    final List<AccessExecution> executions = getExecutions(jobMasterGateway, jobVertexId);

    assertThat(executions, hasSize(greaterThanOrEqualTo(1)));
    return executions.get(0);
}

// Current execution attempts of ALL vertices of the job.
private static Collection<AccessExecution> getExecutions(
        final JobMasterGateway jobMasterGateway) {
    final ArchivedExecutionGraph archivedExecutionGraph =
            requestExecutionGraph(jobMasterGateway).getArchivedExecutionGraph();

    return archivedExecutionGraph.getAllVertices().values().stream()
            .flatMap(vertex -> Arrays.stream(vertex.getTaskVertices()))
            .map(AccessExecutionVertex::getCurrentExecutionAttempt)
            .collect(Collectors.toList());
}

// Current execution attempts of a single job vertex (empty list if unknown).
private static List<AccessExecution> getExecutions(
        final JobMasterGateway jobMasterGateway, final JobVertexID jobVertexId) {
    final ArchivedExecutionGraph archivedExecutionGraph =
            requestExecutionGraph(jobMasterGateway).getArchivedExecutionGraph();

    return Optional.ofNullable(archivedExecutionGraph.getAllVertices().get(jobVertexId))
            .map(
                    accessExecutionJobVertex ->
                            Arrays.asList(accessExecutionJobVertex.getTaskVertices()))
            .orElse(Collections.emptyList()).stream()
            .map(AccessExecutionVertex::getCurrentExecutionAttempt)
            .collect(Collectors.toList());
}

private static ExecutionGraphInfo requestExecutionGraph(
        final JobMasterGateway jobMasterGateway) {
    try {
        return jobMasterGateway.requestJob(testingTimeout).get();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
}

// Fetches exactly numberInputSplits splits; asserts that none of them is the
// empty end-of-input marker.
@Nonnull
private static List<InputSplit> getInputSplits(
        int numberInputSplits, Supplier<SerializedInputSplit> nextInputSplit) throws Exception {
    final List<InputSplit> actualInputSplits = new ArrayList<>(numberInputSplits);

    for (int i = 0; i < numberInputSplits; i++) {
        final SerializedInputSplit serializedInputSplit = nextInputSplit.get();

        assertThat(serializedInputSplit.isEmpty(), is(false));

        actualInputSplits.add(
                InstantiationUtil.deserializeObject(
                        serializedInputSplit.getInputSplitData(),
                        ClassLoader.getSystemClassLoader()));
    }

    return actualInputSplits;
}

// Drains the supplier until an empty payload or a null split signals end-of-input.
private List<InputSplit> getRemainingInputSplits(Supplier<SerializedInputSplit> nextInputSplit)
        throws Exception {
    final List<InputSplit> actualInputSplits = new ArrayList<>(16);
    boolean hasMoreInputSplits = true;

    while (hasMoreInputSplits) {
        final SerializedInputSplit serializedInputSplit = nextInputSplit.get();

        if (serializedInputSplit.isEmpty()) {
            hasMoreInputSplits = false;
        } else {
            final InputSplit inputSplit =
                    InstantiationUtil.deserializeObject(
                            serializedInputSplit.getInputSplitData(),
                            ClassLoader.getSystemClassLoader());

            if (inputSplit == null) {
                hasMoreInputSplits = false;
            } else {
                actualInputSplits.add(inputSplit);
            }
        }
    }

    return actualInputSplits;
}

private static SerializedInputSplit getInputSplit(
        final JobMasterGateway jobMasterGateway,
        final JobVertexID jobVertexId,
        final ExecutionAttemptID attemptId) {

    try {
        return jobMasterGateway.requestNextInputSplit(jobVertexId, attemptId).get();
    } catch (InterruptedException | ExecutionException e) {
        throw new RuntimeException(e);
    }
}

/** Serves a fixed list of splits and assigns them via the default assigner. */
private static final class TestingInputSplitSource
        implements InputSplitSource<TestingInputSplit> {
    private static final long serialVersionUID = -2344684048759139086L;

    private final List<TestingInputSplit> inputSplits;

    private TestingInputSplitSource(List<TestingInputSplit> inputSplits) {
        this.inputSplits = inputSplits;
    }

    @Override
    public TestingInputSplit[] createInputSplits(int minNumSplits) {
        return inputSplits.toArray(EMPTY_TESTING_INPUT_SPLITS);
    }

    @Override
    public InputSplitAssigner getInputSplitAssigner(TestingInputSplit[] inputSplits) {
        return new DefaultInputSplitAssigner(inputSplits);
    }
}

/** Minimal split identified solely by its number; equality is by split number. */
private static final class TestingInputSplit implements InputSplit {
    private static final long serialVersionUID = -5404803705463116083L;

    private final int splitNumber;

    TestingInputSplit(int number) {
        this.splitNumber = number;
    }

    public int getSplitNumber() {
        return splitNumber;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TestingInputSplit that = (TestingInputSplit) o;
        return splitNumber == that.splitNumber;
    }

    @Override
    public int hashCode() {
        return Objects.hash(splitNumber);
    }
}

/**
 * Tests the {@link JobMaster#requestPartitionState(IntermediateDataSetID, ResultPartitionID)}
 * call for a finished result partition.
 */
@Test
public void testRequestPartitionState() throws Exception {
    final JobGraph producerConsumerJobGraph = producerConsumerJobGraph();
    final JobMaster jobMaster =
            new JobMasterBuilder(producerConsumerJobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    jobMaster.start();

    try {
        final CompletableFuture<TaskDeploymentDescriptor> tddFuture = new CompletableFuture<>();
        final TestingTaskExecutorGateway testingTaskExecutorGateway =
                new TestingTaskExecutorGatewayBuilder()
                        .setSubmitTaskConsumer(
                                (taskDeploymentDescriptor, jobMasterId) -> {
                                    tddFuture.complete(taskDeploymentDescriptor);
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .createTestingTaskExecutorGateway();
        final LocalUnresolvedTaskManagerLocation taskManagerLocation =
                new LocalUnresolvedTaskManagerLocation();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        final Collection<SlotOffer> slotOffers =
                registerSlotsAtJobMaster(
                        1,
                        jobMasterGateway,
                        producerConsumerJobGraph.getJobID(),
                        testingTaskExecutorGateway,
                        taskManagerLocation);

        assertThat(slotOffers, hasSize(1));

        // obtain tdd for the result partition ids
        final TaskDeploymentDescriptor tdd = tddFuture.get();

        assertThat(tdd.getProducedPartitions(), hasSize(1));
        final ResultPartitionDeploymentDescriptor partition =
                tdd.getProducedPartitions().iterator().next();

        final ExecutionAttemptID executionAttemptId = tdd.getExecutionAttemptId();
        // a copy with the same value must be accepted by the lookup as well
        final ExecutionAttemptID copiedExecutionAttemptId =
                new ExecutionAttemptID(executionAttemptId);

        // finish the producer task
        jobMasterGateway
                .updateTaskExecutionState(
                        new TaskExecutionState(executionAttemptId, ExecutionState.FINISHED))
                .get();

        // request the state of the result partition of the producer
        final ResultPartitionID partitionId =
                new ResultPartitionID(partition.getPartitionId(), copiedExecutionAttemptId);
        CompletableFuture<ExecutionState> partitionStateFuture =
                jobMasterGateway.requestPartitionState(partition.getResultId(), partitionId);

        assertThat(partitionStateFuture.get(), equalTo(ExecutionState.FINISHED));

        // ask for unknown result partition
        partitionStateFuture =
                jobMasterGateway.requestPartitionState(
                        partition.getResultId(), new ResultPartitionID());

        try {
            partitionStateFuture.get();
            fail("Expected failure.");
        } catch (ExecutionException e) {
            assertThat(
                    ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(),
                    is(true));
        }

        // ask for wrong intermediate data set id
        partitionStateFuture =
                jobMasterGateway.requestPartitionState(
                        new IntermediateDataSetID(), partitionId);

        try {
            partitionStateFuture.get();
            fail("Expected failure.");
        } catch (ExecutionException e) {
            assertThat(
                    ExceptionUtils.findThrowable(e, IllegalArgumentException.class).isPresent(),
                    is(true));
        }

        // ask for "old" execution
        partitionStateFuture =
                jobMasterGateway.requestPartitionState(
                        partition.getResultId(),
                        new ResultPartitionID(
                                partition.getPartitionId(), new ExecutionAttemptID()));

        try {
            partitionStateFuture.get();
            fail("Expected failure.");
        } catch (ExecutionException e) {
            assertThat(
                    ExceptionUtils.findThrowable(e, PartitionProducerDisposedException.class)
                            .isPresent(),
                    is(true));
        }
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

// Announces the given testing RM gateway as the current RM leader.
private void notifyResourceManagerLeaderListeners(
        TestingResourceManagerGateway testingResourceManagerGateway) {
    rmLeaderRetrievalService.notifyListener(
            testingResourceManagerGateway.getAddress(),
            testingResourceManagerGateway.getFencingToken().toUUID());
}

/**
 * Tests that the timeout in {@link JobMasterGateway#triggerSavepoint(String, boolean,
 * SavepointFormatType, Time)} is respected.
 */
@Test
public void testTriggerSavepointTimeout() throws Exception {
    // the scheduler's savepoint future never completes, so only the RPC timeout can fire
    final TestingSchedulerNG testingSchedulerNG =
            TestingSchedulerNG.newBuilder()
                    .setTriggerSavepointFunction(
                            (ignoredA, ignoredB, formatType) -> new CompletableFuture<>())
                    .build();

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withFatalErrorHandler(testingFatalErrorHandler)
                    .withSlotPoolServiceSchedulerFactory(
                            DefaultSlotPoolServiceSchedulerFactory.create(
                                    TestingSlotPoolServiceBuilder.newBuilder(),
                                    new TestingSchedulerNGFactory(testingSchedulerNG)))
                    .createJobMaster();

    try {
        jobMaster.start();
        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);
        final CompletableFuture<String> savepointFutureLowTimeout =
                jobMasterGateway.triggerSavepoint(
                        "/tmp", false, SavepointFormatType.CANONICAL, Time.milliseconds(1));
        final CompletableFuture<String> savepointFutureHighTimeout =
                jobMasterGateway.triggerSavepoint(
                        "/tmp", false, SavepointFormatType.CANONICAL, RpcUtils.INF_TIMEOUT);

        // the 1ms-timeout call must fail with a TimeoutException
        try {
            savepointFutureLowTimeout.get(testingTimeout.getSize(), testingTimeout.getUnit());
            fail();
        } catch (final ExecutionException e) {
            final Throwable cause = ExceptionUtils.stripExecutionException(e);
            assertThat(cause, instanceOf(TimeoutException.class));
        }

        // the infinite-timeout call must still be pending
        assertThat(savepointFutureHighTimeout.isDone(), is(equalTo(false)));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/** Tests that the TaskExecutor is released if all of its slots have been freed.
 */
@Test
public void testReleasingTaskExecutorIfNoMoreSlotsRegistered() throws Exception {
    final JobGraph jobGraph = createSingleVertexJobWithRestartStrategy();

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .createJobMaster();

    final CompletableFuture<JobID> disconnectTaskExecutorFuture = new CompletableFuture<>();
    final CompletableFuture<AllocationID> freedSlotFuture = new CompletableFuture<>();
    final TestingTaskExecutorGateway testingTaskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setFreeSlotFunction(
                            (allocationID, throwable) -> {
                                freedSlotFuture.complete(allocationID);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .setDisconnectJobManagerConsumer(
                            (jobID, throwable) -> disconnectTaskExecutorFuture.complete(jobID))
                    .createTestingTaskExecutorGateway();
    final LocalUnresolvedTaskManagerLocation taskManagerLocation =
            new LocalUnresolvedTaskManagerLocation();

    try {
        jobMaster.start();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        final Collection<SlotOffer> slotOffers =
                registerSlotsAtJobMaster(
                        1,
                        jobMasterGateway,
                        jobGraph.getJobID(),
                        testingTaskExecutorGateway,
                        taskManagerLocation);

        // check that we accepted the offered slot
        assertThat(slotOffers, hasSize(1));
        final AllocationID allocationId = slotOffers.iterator().next().getAllocationId();

        // now fail the allocation and check that we close the connection to the TaskExecutor
        jobMasterGateway.failSlot(
                taskManagerLocation.getResourceID(),
                allocationId,
                new FlinkException("Fail allocation test exception"));

        // we should free the slot and then disconnect from the TaskExecutor because we no
        // longer use slots from it
        assertThat(freedSlotFuture.get(), equalTo(allocationId));
        assertThat(disconnectTaskExecutorFuture.get(), equalTo(jobGraph.getJobID()));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

// Verifies that the TaskExecutor connection is kept alive after a failed allocation
// as long as the partition tracker still tracks partitions on that TaskExecutor.
@Test
public void testTaskExecutorNotReleasedOnFailedAllocationIfPartitionIsAllocated()
        throws Exception {
    final JobManagerSharedServices jobManagerSharedServices =
            new TestingJobManagerSharedServicesBuilder().build();

    final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();

    final LocalUnresolvedTaskManagerLocation taskManagerUnresolvedLocation =
            new LocalUnresolvedTaskManagerLocation();

    // the tracker keeps reporting tracked partitions, which must prevent the disconnect
    final AtomicBoolean isTrackingPartitions = new AtomicBoolean(true);
    final TestingJobMasterPartitionTracker partitionTracker =
            new TestingJobMasterPartitionTracker();
    partitionTracker.setIsTrackingPartitionsForFunction(ignored -> isTrackingPartitions.get());

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withConfiguration(configuration)
                    .withHighAvailabilityServices(haServices)
                    .withJobManagerSharedServices(jobManagerSharedServices)
                    .withHeartbeatServices(heartbeatServices)
                    .withPartitionTrackerFactory(ignored -> partitionTracker)
                    .createJobMaster();

    final CompletableFuture<JobID> disconnectTaskExecutorFuture = new CompletableFuture<>();
    final CompletableFuture<AllocationID> freedSlotFuture = new CompletableFuture<>();
    final TestingTaskExecutorGateway testingTaskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setFreeSlotFunction(
                            (allocationID, throwable) -> {
                                freedSlotFuture.complete(allocationID);
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .setDisconnectJobManagerConsumer(
                            (jobID, throwable) -> disconnectTaskExecutorFuture.complete(jobID))
                    .createTestingTaskExecutorGateway();

    try {
        jobMaster.start();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        final Collection<SlotOffer> slotOffers =
                registerSlotsAtJobMaster(
                        1,
                        jobMasterGateway,
                        jobGraph.getJobID(),
                        testingTaskExecutorGateway,
                        taskManagerUnresolvedLocation);

        // check that we accepted the offered slot
        assertThat(slotOffers, hasSize(1));
        final AllocationID
allocationId = slotOffers.iterator().next().getAllocationId(); jobMasterGateway.failSlot( taskManagerUnresolvedLocation.getResourceID(), allocationId, new FlinkException("Fail allocation test exception")); // we should free the slot, but not disconnect from the TaskExecutor as we still have an // allocated partition assertThat(freedSlotFuture.get(), equalTo(allocationId)); // trigger some request to guarantee ensure the slotAllocationFailure processing if // complete jobMasterGateway.requestJobStatus(Time.seconds(5)).get(); assertThat(disconnectTaskExecutorFuture.isDone(), is(false)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } /** Tests the updateGlobalAggregate functionality. */ @Test public void testJobMasterAggregatesValuesCorrectly() throws Exception { final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService) .withConfiguration(configuration) .withHighAvailabilityServices(haServices) .withHeartbeatServices(heartbeatServices) .createJobMaster(); jobMaster.start(); final JobMasterGateway jobMasterGateway = jobMaster.getSelfGateway(JobMasterGateway.class); try { CompletableFuture<Object> updateAggregateFuture; AggregateFunction<Integer, Integer, Integer> aggregateFunction = createAggregateFunction(); ClosureCleaner.clean( aggregateFunction, ExecutionConfig.ClosureCleanerLevel.RECURSIVE, true); byte[] serializedAggregateFunction = InstantiationUtil.serializeObject(aggregateFunction); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 1, serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(1)); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 2, serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(3)); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 3, serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(6)); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg1", 4, 
serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(10)); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg2", 10, serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(10)); updateAggregateFuture = jobMasterGateway.updateGlobalAggregate("agg2", 23, serializedAggregateFunction); assertThat(updateAggregateFuture.get(), equalTo(33)); } finally { RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout); } } private AggregateFunction<Integer, Integer, Integer> createAggregateFunction() { return new AggregateFunction<Integer, Integer, Integer>() { @Override public Integer createAccumulator() { return 0; } @Override public Integer add(Integer value, Integer accumulator) { return accumulator + value; } @Override public Integer getResult(Integer accumulator) { return accumulator; } @Override public Integer merge(Integer a, Integer b) { return add(a, b); } }; } @Nonnull private TestingResourceManagerGateway createAndRegisterTestingResourceManagerGateway() { final TestingResourceManagerGateway testingResourceManagerGateway = new TestingResourceManagerGateway(); rpcService.registerGateway( testingResourceManagerGateway.getAddress(), testingResourceManagerGateway); return testingResourceManagerGateway; } /** * Tests that the job execution is failed if the TaskExecutor disconnects from the JobMaster. 
*/
@Test
public void testJobFailureWhenGracefulTaskExecutorTermination() throws Exception {
    runJobFailureWhenTaskExecutorTerminatesTest(
            heartbeatServices,
            // graceful termination: the TaskExecutor explicitly disconnects
            (localTaskManagerLocation, jobMasterGateway) ->
                    jobMasterGateway.disconnectTaskManager(
                            localTaskManagerLocation.getResourceID(),
                            new FlinkException("Test disconnectTaskManager exception.")),
            (jobMasterGateway, resourceID) ->
                    (ignoredA, ignoredB) -> FutureUtils.completedVoidFuture());
}

@Test
public void testJobFailureWhenTaskExecutorHeartbeatTimeout() throws Exception {
    final TestingHeartbeatServices testingHeartbeatService =
            new TestingHeartbeatServices(heartbeatInterval, heartbeatTimeout);

    runJobFailureWhenTaskExecutorTerminatesTest(
            testingHeartbeatService,
            // ungraceful termination: simulate a heartbeat timeout of the TaskExecutor
            (localTaskManagerLocation, jobMasterGateway) ->
                    testingHeartbeatService.triggerHeartbeatTimeout(
                            jmResourceId, localTaskManagerLocation.getResourceID()),
            (jobMasterGateway, taskManagerResourceId) ->
                    (resourceId, ignored) -> {
                        jobMasterGateway.heartbeatFromTaskManager(
                                taskManagerResourceId,
                                TaskExecutorToJobManagerHeartbeatPayload.empty());
                        return FutureUtils.completedVoidFuture();
                    });
}

/**
 * Tests that the JobMaster rejects a TaskExecutor registration attempt if the expected and
 * actual JobID are not equal. See FLINK-21606.
 */
@Test
public void testJobMasterRejectsTaskExecutorRegistrationIfJobIdsAreNotEqual() throws Exception {
    final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService).createJobMaster();

    try {
        jobMaster.start();

        // register with a fresh JobID that differs from the job the JobMaster runs
        final CompletableFuture<RegistrationResponse> registrationResponse =
                jobMaster.registerTaskManager(
                        new JobID(),
                        TaskManagerRegistrationInformation.create(
                                "foobar",
                                new LocalUnresolvedTaskManagerLocation(),
                                TestingUtils.zeroUUID()),
                        testingTimeout);

        assertThat(registrationResponse.get(), instanceOf(JMTMRegistrationRejection.class));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

@Test
public void testJobMasterAcknowledgesDuplicateTaskExecutorRegistrations() throws Exception {
    final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService).createJobMaster();

    final TestingTaskExecutorGateway testingTaskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder().createTestingTaskExecutorGateway();
    rpcService.registerGateway(
            testingTaskExecutorGateway.getAddress(), testingTaskExecutorGateway);

    try {
        jobMaster.start();

        final TaskManagerRegistrationInformation taskManagerRegistrationInformation =
                TaskManagerRegistrationInformation.create(
                        testingTaskExecutorGateway.getAddress(),
                        new LocalUnresolvedTaskManagerLocation(),
                        UUID.randomUUID());

        // registering twice with identical registration information must succeed both times
        final CompletableFuture<RegistrationResponse> firstRegistrationResponse =
                jobMaster.registerTaskManager(
                        jobGraph.getJobID(), taskManagerRegistrationInformation, testingTimeout);
        final CompletableFuture<RegistrationResponse> secondRegistrationResponse =
                jobMaster.registerTaskManager(
                        jobGraph.getJobID(), taskManagerRegistrationInformation, testingTimeout);

        assertThat(firstRegistrationResponse.get(), instanceOf(JMTMRegistrationSuccess.class));
        assertThat(secondRegistrationResponse.get(), instanceOf(JMTMRegistrationSuccess.class));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

@Test
public void testJobMasterDisconnectsOldTaskExecutorIfNewSessionIsSeen() throws Exception {
    final JobMaster jobMaster = new JobMasterBuilder(jobGraph, rpcService).createJobMaster();
    final CompletableFuture<Void> firstTaskExecutorDisconnectedFuture =
            new CompletableFuture<>();
    final TestingTaskExecutorGateway firstTaskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setAddress("firstTaskExecutor")
                    .setDisconnectJobManagerConsumer(
                            (jobID, throwable) ->
                                    firstTaskExecutorDisconnectedFuture.complete(null))
                    .createTestingTaskExecutorGateway();
    final TestingTaskExecutorGateway secondTaskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setAddress("secondTaskExecutor")
                    .createTestingTaskExecutorGateway();

    rpcService.registerGateway(firstTaskExecutorGateway.getAddress(), firstTaskExecutorGateway);
    rpcService.registerGateway(
            secondTaskExecutorGateway.getAddress(), secondTaskExecutorGateway);

    try {
        jobMaster.start();

        final LocalUnresolvedTaskManagerLocation taskManagerLocation =
                new LocalUnresolvedTaskManagerLocation();
        final UUID firstTaskManagerSessionId = UUID.randomUUID();

        final CompletableFuture<RegistrationResponse> firstRegistrationResponse =
                jobMaster.registerTaskManager(
                        jobGraph.getJobID(),
                        TaskManagerRegistrationInformation.create(
                                firstTaskExecutorGateway.getAddress(),
                                taskManagerLocation,
                                firstTaskManagerSessionId),
                        testingTimeout);
        assertThat(firstRegistrationResponse.get(), instanceOf(JMTMRegistrationSuccess.class));

        // a registration for the same location under a new session id supersedes the first one
        final UUID secondTaskManagerSessionId = UUID.randomUUID();
        final CompletableFuture<RegistrationResponse> secondRegistrationResponse =
                jobMaster.registerTaskManager(
                        jobGraph.getJobID(),
                        TaskManagerRegistrationInformation.create(
                                secondTaskExecutorGateway.getAddress(),
                                taskManagerLocation,
                                secondTaskManagerSessionId),
                        testingTimeout);
        assertThat(secondRegistrationResponse.get(), instanceOf(JMTMRegistrationSuccess.class));

        // the first TaskExecutor should be disconnected
        firstTaskExecutorDisconnectedFuture.get();
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

@Test
public void
testJobMasterOnlyTerminatesAfterTheSchedulerHasClosed() throws Exception {
    final CompletableFuture<Void> schedulerTerminationFuture = new CompletableFuture<>();
    final TestingSchedulerNG testingSchedulerNG =
            TestingSchedulerNG.newBuilder()
                    .setCloseAsyncSupplier(() -> schedulerTerminationFuture)
                    .build();

    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withSlotPoolServiceSchedulerFactory(
                            DefaultSlotPoolServiceSchedulerFactory.create(
                                    TestingSlotPoolServiceBuilder.newBuilder(),
                                    new TestingSchedulerNGFactory(testingSchedulerNG)))
                    .createJobMaster();

    jobMaster.start();

    final CompletableFuture<Void> jobMasterTerminationFuture = jobMaster.closeAsync();

    // while the scheduler has not terminated, the JobMaster must not terminate either
    try {
        jobMasterTerminationFuture.get(10L, TimeUnit.MILLISECONDS);
        fail("Expected TimeoutException because the JobMaster should not terminate.");
    } catch (TimeoutException expected) {
    }

    schedulerTerminationFuture.complete(null);
    jobMasterTerminationFuture.get();
}

@Test
public void testJobMasterAcceptsSlotsWhenJobIsRestarting() throws Exception {
    // very long restart delay keeps the job in RESTARTING while the test runs
    configuration.set(RestartStrategyOptions.RESTART_STRATEGY, "fixed-delay");
    configuration.set(
            RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_DELAY, Duration.ofDays(1));
    final int numberSlots = 1;
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withConfiguration(configuration)
                    .createJobMaster();

    try {
        jobMaster.start();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);

        final LocalUnresolvedTaskManagerLocation unresolvedTaskManagerLocation =
                new LocalUnresolvedTaskManagerLocation();
        registerSlotsAtJobMaster(
                numberSlots,
                jobMasterGateway,
                jobGraph.getJobID(),
                new TestingTaskExecutorGatewayBuilder()
                        .setAddress("firstTaskManager")
                        .createTestingTaskExecutorGateway(),
                unresolvedTaskManagerLocation);

        CommonTestUtils.waitUntilCondition(
                () ->
                        jobMasterGateway.requestJobStatus(testingTimeout).get()
                                == JobStatus.RUNNING,
                Deadline.fromNow(TimeUtils.toDuration(testingTimeout)));

        // drop the only TaskManager to force the job into RESTARTING
        jobMasterGateway.disconnectTaskManager(
                unresolvedTaskManagerLocation.getResourceID(),
                new FlinkException("Test exception."));

        CommonTestUtils.waitUntilCondition(
                () ->
                        jobMasterGateway.requestJobStatus(testingTimeout).get()
                                == JobStatus.RESTARTING,
                Deadline.fromNow(TimeUtils.toDuration(testingTimeout)));

        // while restarting, offers from a fresh TaskManager must still be accepted
        assertThat(
                registerSlotsAtJobMaster(
                        numberSlots,
                        jobMasterGateway,
                        jobGraph.getJobID(),
                        new TestingTaskExecutorGatewayBuilder()
                                .setAddress("secondTaskManager")
                                .createTestingTaskExecutorGateway(),
                        new LocalUnresolvedTaskManagerLocation()),
                hasSize(numberSlots));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/**
 * Deploys a single task, drives it to RUNNING, terminates its TaskExecutor via
 * {@code jobReachedRunningState} and asserts that the job transitions to FAILED.
 */
private void runJobFailureWhenTaskExecutorTerminatesTest(
        HeartbeatServices heartbeatServices,
        BiConsumer<LocalUnresolvedTaskManagerLocation, JobMasterGateway> jobReachedRunningState,
        BiFunction<
                        JobMasterGateway,
                        ResourceID,
                        BiFunction<ResourceID, AllocatedSlotReport, CompletableFuture<Void>>>
                heartbeatConsumerFunction)
        throws Exception {
    final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();
    final JobMasterBuilder.TestingOnCompletionActions onCompletionActions =
            new JobMasterBuilder.TestingOnCompletionActions();
    final JobMaster jobMaster =
            new JobMasterBuilder(jobGraph, rpcService)
                    .withResourceId(jmResourceId)
                    .withHighAvailabilityServices(haServices)
                    .withHeartbeatServices(heartbeatServices)
                    .withOnCompletionActions(onCompletionActions)
                    .createJobMaster();

    try {
        jobMaster.start();

        final JobMasterGateway jobMasterGateway =
                jobMaster.getSelfGateway(JobMasterGateway.class);
        final LocalUnresolvedTaskManagerLocation taskManagerUnresolvedLocation =
                new LocalUnresolvedTaskManagerLocation();
        final CompletableFuture<ExecutionAttemptID> taskDeploymentFuture =
                new CompletableFuture<>();
        final TestingTaskExecutorGateway taskExecutorGateway =
                new TestingTaskExecutorGatewayBuilder()
                        .setSubmitTaskConsumer(
                                (taskDeploymentDescriptor, jobMasterId) -> {
                                    taskDeploymentFuture.complete(
                                            taskDeploymentDescriptor.getExecutionAttemptId());
                                    return CompletableFuture.completedFuture(Acknowledge.get());
                                })
                        .setHeartbeatJobManagerFunction(
                                heartbeatConsumerFunction.apply(
                                        jobMasterGateway,
                                        taskManagerUnresolvedLocation.getResourceID()))
                        .createTestingTaskExecutorGateway();

        final Collection<SlotOffer> slotOffers =
                registerSlotsAtJobMaster(
                        1,
                        jobMasterGateway,
                        jobGraph.getJobID(),
                        taskExecutorGateway,
                        taskManagerUnresolvedLocation);
        assertThat(slotOffers, hasSize(1));

        final ExecutionAttemptID executionAttemptId = taskDeploymentFuture.get();

        jobMasterGateway
                .updateTaskExecutionState(
                        new TaskExecutionState(executionAttemptId, ExecutionState.INITIALIZING))
                .get();
        jobMasterGateway
                .updateTaskExecutionState(
                        new TaskExecutionState(executionAttemptId, ExecutionState.RUNNING))
                .get();

        jobReachedRunningState.accept(taskManagerUnresolvedLocation, jobMasterGateway);

        final ArchivedExecutionGraph archivedExecutionGraph =
                onCompletionActions
                        .getJobReachedGloballyTerminalStateFuture()
                        .get()
                        .getArchivedExecutionGraph();

        assertThat(archivedExecutionGraph.getState(), is(JobStatus.FAILED));
    } finally {
        RpcUtils.terminateRpcEndpoint(jobMaster, testingTimeout);
    }
}

/** Registers the given TaskExecutor at the JobMaster and offers {@code numberSlots} slots. */
private Collection<SlotOffer> registerSlotsAtJobMaster(
        int numberSlots,
        JobMasterGateway jobMasterGateway,
        JobID jobId,
        TaskExecutorGateway taskExecutorGateway,
        UnresolvedTaskManagerLocation unresolvedTaskManagerLocation)
        throws ExecutionException, InterruptedException {
    rpcService.registerGateway(taskExecutorGateway.getAddress(), taskExecutorGateway);

    jobMasterGateway
            .registerTaskManager(
                    jobId,
                    TaskManagerRegistrationInformation.create(
                            taskExecutorGateway.getAddress(),
                            unresolvedTaskManagerLocation,
                            TestingUtils.zeroUUID()),
                    testingTimeout)
            .get();

    Collection<SlotOffer> slotOffers =
            IntStream.range(0, numberSlots)
                    .mapToObj(
                            index ->
                                    new SlotOffer(
                                            new AllocationID(), index, ResourceProfile.ANY))
                    .collect(Collectors.toList());

    return jobMasterGateway
            .offerSlots(
                    unresolvedTaskManagerLocation.getResourceID(), slotOffers, testingTimeout)
            .get();
}

/** Creates a two-vertex batch job with a blocking POINTWISE edge between the vertices. */
private JobGraph producerConsumerJobGraph() {
    final JobVertex producer = new JobVertex("Producer");
    producer.setInvokableClass(NoOpInvokable.class);
    final JobVertex consumer = new JobVertex("Consumer");
    consumer.setInvokableClass(NoOpInvokable.class);
    consumer.connectNewDataSetAsInput(
            producer, DistributionPattern.POINTWISE, ResultPartitionType.BLOCKING);

    return JobGraphTestUtils.batchJobGraph(producer, consumer);
}

private File createSavepoint(long savepointId) throws IOException {
    return TestUtils.createSavepointWithOperatorState(temporaryFolder.newFile(), savepointId);
}

@Nonnull
private JobGraph createJobGraphWithCheckpointing(
        SavepointRestoreSettings savepointRestoreSettings) {
    final JobVertex source = new JobVertex("source");
    source.setInvokableClass(NoOpInvokable.class);
    source.setParallelism(1);

    return TestUtils.createJobGraphFromJobVerticesWithCheckpointing(
            savepointRestoreSettings, source);
}

private JobGraph createSingleVertexJobWithRestartStrategy() throws IOException {
    final JobGraph jobGraph = JobGraphTestUtils.singleNoOpJobGraph();

    // restart immediately and (practically) forever so tests can observe restarts
    final ExecutionConfig executionConfig = new ExecutionConfig();
    executionConfig.setRestartStrategy(
            RestartStrategies.fixedDelayRestart(Integer.MAX_VALUE, 0L));
    jobGraph.setExecutionConfig(executionConfig);

    return jobGraph;
}

/** No-op {@link CompletedCheckpointStorageLocation} returning {@code null} handles. */
private static final class DummyCheckpointStorageLocation
        implements CompletedCheckpointStorageLocation {

    private static final long serialVersionUID = 164095949572620688L;

    @Override
    public String getExternalPointer() {
        return null;
    }

    @Override
    public StreamStateHandle getMetadataHandle() {
        return null;
    }

    @Override
    public void disposeStorageLocation() throws IOException {}
}

/** Registers a TaskExecutor that acknowledges cancellations and offers {@code numSlots} slots. */
private static void registerSlotsRequiredForJobExecution(
        JobMasterGateway jobMasterGateway, JobID jobId, int numSlots)
        throws ExecutionException, InterruptedException {
    final TaskExecutorGateway taskExecutorGateway =
            new TestingTaskExecutorGatewayBuilder()
                    .setCancelTaskFunction(
                            executionAttemptId -> {
                                jobMasterGateway.updateTaskExecutionState(
                                        new TaskExecutionState(
                                                executionAttemptId, ExecutionState.CANCELED));
                                return CompletableFuture.completedFuture(Acknowledge.get());
                            })
                    .createTestingTaskExecutorGateway();

    JobMasterTestUtils.registerTaskExecutorAndOfferSlots(
            rpcService, jobMasterGateway, jobId, numSlots, taskExecutorGateway, testingTimeout);
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.hadoop.inputformat; import java.io.File; import java.io.IOException; import java.io.Serializable; import java.net.ServerSocket; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Map; import org.apache.beam.sdk.io.common.HashingFn; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Combine; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.transforms.MapElements; import org.apache.beam.sdk.transforms.SimpleFunction; import org.apache.beam.sdk.transforms.Values; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PCollection; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.InputFormat; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.hadoop.cfg.ConfigurationOptions; import org.elasticsearch.hadoop.mr.EsInputFormat; import 
org.elasticsearch.hadoop.mr.LinkedMapWritable; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.transport.Netty4Plugin; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Tests to validate HadoopInputFormatIO for embedded Elasticsearch instance. * * {@link EsInputFormat} can be used to read data from Elasticsearch. EsInputFormat by default * returns key class as Text and value class as LinkedMapWritable. You can also set MapWritable as * value class, provided that you set the property "mapred.mapoutput.value.class" with * MapWritable.class. If this property is not set then, using MapWritable as value class may give * org.apache.beam.sdk.coders.CoderException due to unexpected extra bytes after decoding. 
*/
@RunWith(JUnit4.class)
public class HIFIOWithElasticTest implements Serializable {

  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(HIFIOWithElasticTest.class);
  private static final String ELASTIC_IN_MEM_HOSTNAME = "127.0.0.1";
  // Default port; overwritten in startServer() with a free ephemeral port.
  private static String elasticInMemPort = "9200";
  private static final String ELASTIC_INTERNAL_VERSION = "5.x";
  private static final String TRUE = "true";
  private static final String ELASTIC_INDEX_NAME = "beamdb";
  private static final String ELASTIC_TYPE_NAME = "scientists";
  private static final String ELASTIC_RESOURCE = "/" + ELASTIC_INDEX_NAME + "/" + ELASTIC_TYPE_NAME;
  private static final int TEST_DATA_ROW_COUNT = 10;
  private static final String ELASTIC_TYPE_ID_PREFIX = "s";

  @ClassRule
  public static TemporaryFolder elasticTempFolder = new TemporaryFolder();

  @Rule
  public final transient TestPipeline pipeline = TestPipeline.create();

  /**
   * Picks a free local port, then boots the embedded Elasticsearch node on it and populates the
   * test index.
   */
  @BeforeClass
  public static void startServer()
      throws NodeValidationException, InterruptedException, IOException {
    // Bind-and-close trick to obtain a currently unused port for the embedded server.
    ServerSocket serverSocket = new ServerSocket(0);
    int port = serverSocket.getLocalPort();
    serverSocket.close();
    elasticInMemPort = String.valueOf(port);
    ElasticEmbeddedServer.startElasticEmbeddedServer();
  }

  /**
   * Test to read data from embedded Elasticsearch instance and verify whether data is read
   * successfully.
   */
  @Test
  public void testHifIOWithElastic() {
    // Expected hashcode is evaluated during insertion time one time and hardcoded here.
    String expectedHashCode = "a62a85f5f081e3840baf1028d4d6c6bc";
    Configuration conf = getConfiguration();
    PCollection<KV<Text, LinkedMapWritable>> esData =
        pipeline.apply(HadoopInputFormatIO.<Text, LinkedMapWritable>read().withConfiguration(conf));
    PCollection<Long> count = esData.apply(Count.<KV<Text, LinkedMapWritable>>globally());
    // Verify that the count of objects fetched using HIFInputFormat IO is correct.
    PAssert.thatSingleton(count).isEqualTo((long) TEST_DATA_ROW_COUNT);
    PCollection<LinkedMapWritable> values = esData.apply(Values.<LinkedMapWritable>create());
    PCollection<String> textValues = values.apply(transformFunc);
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
  }

  // Flattens each fetched document into "id|scientist" for stable checksum comparison.
  MapElements<LinkedMapWritable, String> transformFunc =
      MapElements.<LinkedMapWritable, String>via(new SimpleFunction<LinkedMapWritable, String>() {
        @Override
        public String apply(LinkedMapWritable mapw) {
          return mapw.get(new Text("id")) + "|" + mapw.get(new Text("scientist"));
        }
      });

  /**
   * Test to read data from embedded Elasticsearch instance based on query and verify whether data
   * is read successfully.
   */
  @Test
  public void testHifIOWithElasticQuery() {
    long expectedRowCount = 1L;
    String expectedHashCode = "cfbf3e5c993d44e57535a114e25f782d";
    Configuration conf = getConfiguration();
    String fieldValue = ELASTIC_TYPE_ID_PREFIX + "2";
    // Match query selecting exactly the row whose id equals fieldValue.
    String query = "{"
        + " \"query\": {"
        + " \"match\" : {"
        + " \"id\" : {"
        + " \"query\" : \"" + fieldValue + "\","
        + " \"type\" : \"boolean\""
        + " }"
        + " }"
        + " }"
        + "}";
    conf.set(ConfigurationOptions.ES_QUERY, query);
    PCollection<KV<Text, LinkedMapWritable>> esData =
        pipeline.apply(HadoopInputFormatIO.<Text, LinkedMapWritable>read().withConfiguration(conf));
    PCollection<Long> count = esData.apply(Count.<KV<Text, LinkedMapWritable>>globally());
    // Verify that the count of objects fetched using HIFInputFormat IO is correct.
    PAssert.thatSingleton(count).isEqualTo(expectedRowCount);
    PCollection<LinkedMapWritable> values = esData.apply(Values.<LinkedMapWritable>create());
    PCollection<String> textValues = values.apply(transformFunc);
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
  }

  /**
   * Set the Elasticsearch configuration parameters in the Hadoop configuration object.
   * Configuration object should have InputFormat class, key class and value class set. Mandatory
   * fields for ESInputFormat to be set are es.resource, es.nodes, es.port, es.internal.es.version.
   * Please refer to
   * <a href="https://www.elastic.co/guide/en/elasticsearch/hadoop/current/configuration.html"
   * >Elasticsearch Configuration</a> for more details.
   */
  public Configuration getConfiguration() {
    Configuration conf = new Configuration();
    conf.set(ConfigurationOptions.ES_NODES, ELASTIC_IN_MEM_HOSTNAME);
    conf.set(ConfigurationOptions.ES_PORT, String.format("%s", elasticInMemPort));
    conf.set(ConfigurationOptions.ES_RESOURCE, ELASTIC_RESOURCE);
    conf.set("es.internal.es.version", ELASTIC_INTERNAL_VERSION);
    conf.set(ConfigurationOptions.ES_NODES_DISCOVERY, TRUE);
    conf.set(ConfigurationOptions.ES_INDEX_AUTO_CREATE, TRUE);
    conf.setClass("mapreduce.job.inputformat.class",
        org.elasticsearch.hadoop.mr.EsInputFormat.class, InputFormat.class);
    conf.setClass("key.class", Text.class, Object.class);
    conf.setClass("value.class", LinkedMapWritable.class, Object.class);
    return conf;
  }

  /** Builds one test document with the given id and scientist name. */
  private static Map<String, String> createElasticRow(String id, String name) {
    Map<String, String> data = new HashMap<String, String>();
    data.put("id", id);
    data.put("scientist", name);
    return data;
  }

  @AfterClass
  public static void shutdownServer() throws IOException {
    ElasticEmbeddedServer.shutdown();
  }

  /**
   * Class for in memory Elasticsearch server.
   */
  static class ElasticEmbeddedServer implements Serializable {
    private static final long serialVersionUID = 1L;
    private static Node node;

    public static void startElasticEmbeddedServer()
        throws NodeValidationException, InterruptedException {
      Settings settings = Settings.builder()
          .put("node.data", TRUE)
          .put("network.host", ELASTIC_IN_MEM_HOSTNAME)
          .put("http.port", elasticInMemPort)
          .put("path.data", elasticTempFolder.getRoot().getPath())
          .put("path.home", elasticTempFolder.getRoot().getPath())
          .put("transport.type", "local")
          .put("http.enabled", TRUE)
          .put("node.ingest", TRUE).build();
      node = new PluginNode(settings);
      node.start();
      LOG.info("Elastic in memory server started.");
      prepareElasticIndex();
      // FIX: the original concatenation was missing the space before "and", producing
      // e.g. "Prepared index beamdband populated ..." in the log output.
      LOG.info("Prepared index " + ELASTIC_INDEX_NAME
          + " and populated data on elastic in memory server.");
    }

    /**
     * Prepares Elastic index, by adding rows.
     */
    private static void prepareElasticIndex() throws InterruptedException {
      CreateIndexRequest indexRequest = new CreateIndexRequest(ELASTIC_INDEX_NAME);
      node.client().admin().indices().create(indexRequest).actionGet();
      for (int i = 0; i < TEST_DATA_ROW_COUNT; i++) {
        node.client().prepareIndex(ELASTIC_INDEX_NAME, ELASTIC_TYPE_NAME, String.valueOf(i))
            .setSource(createElasticRow(ELASTIC_TYPE_ID_PREFIX + i, "Faraday" + i)).execute()
            .actionGet();
      }
      // Refresh so the just-indexed documents are visible to subsequent searches.
      node.client().admin().indices().prepareRefresh(ELASTIC_INDEX_NAME).get();
    }

    /**
     * Shutdown the embedded instance.
     * @throws IOException
     */
    public static void shutdown() throws IOException {
      DeleteIndexRequest indexRequest = new DeleteIndexRequest(ELASTIC_INDEX_NAME);
      node.client().admin().indices().delete(indexRequest).actionGet();
      LOG.info("Deleted index " + ELASTIC_INDEX_NAME + " from elastic in memory server");
      node.close();
      LOG.info("Closed elastic in memory server node.");
      deleteElasticDataDirectory();
    }

    private static void deleteElasticDataDirectory() {
      try {
        FileUtils.deleteDirectory(new File(elasticTempFolder.getRoot().getPath()));
      } catch (IOException e) {
        throw new RuntimeException("Could not delete elastic data directory: " + e.getMessage(),
            e);
      }
    }
  }

  /**
   * Class created for handling "http.enabled" property as "true" for Elasticsearch node.
   */
  static class PluginNode extends Node implements Serializable {
    private static final long serialVersionUID = 1L;
    // Netty4 transport plugin is required so the node can serve HTTP.
    static Collection<Class<? extends Plugin>> list =
        new ArrayList<Class<? extends Plugin>>();
    static {
      list.add(Netty4Plugin.class);
    }

    public PluginNode(final Settings settings) {
      super(InternalSettingsPreparer.prepareEnvironment(settings, null), list);
    }
  }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.ui.trans.steps.nullif; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.events.ShellAdapter; import org.eclipse.swt.events.ShellEvent; import org.eclipse.swt.layout.FormAttachment; import org.eclipse.swt.layout.FormData; import org.eclipse.swt.layout.FormLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Display; import org.eclipse.swt.widgets.Event; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Listener; import org.eclipse.swt.widgets.Shell; import org.eclipse.swt.widgets.TableItem; import org.eclipse.swt.widgets.Text; import org.pentaho.di.core.Const; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.i18n.BaseMessages; import 
org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDialogInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.nullif.NullIfMeta;
import org.pentaho.di.ui.core.dialog.ErrorDialog;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.trans.step.BaseStepDialog;

/**
 * Dialog for the "Null If" transformation step: lets the user choose input fields and, per
 * field, the value that should be replaced by null.
 */
public class NullIfDialog extends BaseStepDialog implements StepDialogInterface {
    private static Class<?> PKG = NullIfMeta.class; // for i18n purposes, needed by Translator2!!   $NON-NLS-1$

    private Label wlStepname;
    private Text wStepname;
    private FormData fdlStepname, fdStepname;

    private Label wlFields;
    private TableView wFields;
    private FormData fdlFields, fdFields;

    // the step metadata being edited by this dialog
    private NullIfMeta input;

    private ColumnInfo[] colinf;

    // maps incoming field name -> index in the previous step's row (filled asynchronously)
    private Map<String, Integer> inputFields;

    public NullIfDialog(Shell parent, Object in, TransMeta transMeta, String sname) {
        super(parent, (BaseStepMeta) in, transMeta, sname);
        input = (NullIfMeta) in;
        inputFields = new HashMap<String, Integer>();
    }

    /**
     * Opens the dialog, runs the SWT event loop until it is closed, and returns the
     * (possibly changed) step name.
     */
    public String open() {
        Shell parent = getParent();
        Display display = parent.getDisplay();

        shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MAX | SWT.MIN);
        props.setLook(shell);
        setShellImage(shell, input);

        // any widget modification marks the step meta as changed
        ModifyListener lsMod = new ModifyListener() {
            public void modifyText(ModifyEvent e) {
                input.setChanged();
            }
        };
        changed = input.hasChanged();

        FormLayout formLayout = new FormLayout();
        formLayout.marginWidth = Const.FORM_MARGIN;
        formLayout.marginHeight = Const.FORM_MARGIN;

        shell.setLayout(formLayout);
        shell.setText(BaseMessages.getString(PKG, "NullIfDialog.Shell.Label")); //$NON-NLS-1$

        int middle = props.getMiddlePct();
        int margin = Const.MARGIN;

        // Stepname line
        wlStepname = new Label(shell, SWT.RIGHT);
        wlStepname.setText(BaseMessages.getString(PKG, "NullIfDialog.Stepname.Label")); //$NON-NLS-1$
        props.setLook(wlStepname);
        fdlStepname = new FormData();
        fdlStepname.left = new FormAttachment(0, 0);
        fdlStepname.right = new FormAttachment(middle, -margin);
        fdlStepname.top = new FormAttachment(0, margin);
        wlStepname.setLayoutData(fdlStepname);
        wStepname = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
        wStepname.setText(stepname);
        props.setLook(wStepname);
        wStepname.addModifyListener(lsMod);
        fdStepname = new FormData();
        fdStepname.left = new FormAttachment(middle, 0);
        fdStepname.top = new FormAttachment(0, margin);
        fdStepname.right = new FormAttachment(100, 0);
        wStepname.setLayoutData(fdStepname);

        // Fields label + table: one row per field, columns "name" and "value to null"
        wlFields = new Label(shell, SWT.NONE);
        wlFields.setText(BaseMessages.getString(PKG, "NullIfDialog.Fields.Label")); //$NON-NLS-1$
        props.setLook(wlFields);
        fdlFields = new FormData();
        fdlFields.left = new FormAttachment(0, 0);
        fdlFields.top = new FormAttachment(wStepname, margin);
        wlFields.setLayoutData(fdlFields);

        final int FieldsCols = 2;
        final int FieldsRows = input.getFieldName().length;

        colinf = new ColumnInfo[FieldsCols];
        colinf[0] = new ColumnInfo(BaseMessages.getString(PKG, "NullIfDialog.ColumnInfo.Name"),
            ColumnInfo.COLUMN_TYPE_CCOMBO, new String[] { "" }, false);
        colinf[1] = new ColumnInfo(BaseMessages.getString(PKG, "NullIfDialog.ColumnInfo.ValueToNull"),
            ColumnInfo.COLUMN_TYPE_TEXT, false); //$NON-NLS-1$

        wFields = new TableView(transMeta, shell,
            SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI,
            colinf,
            FieldsRows,
            lsMod,
            props);

        fdFields = new FormData();
        fdFields.left = new FormAttachment(0, 0);
        fdFields.top = new FormAttachment(wlFields, margin);
        fdFields.right = new FormAttachment(100, 0);
        fdFields.bottom = new FormAttachment(100, -50);
        wFields.setLayoutData(fdFields);

        //
        // Search the fields in the background

        final Runnable runnable = new Runnable() {
            public void run() {
                StepMeta stepMeta = transMeta.findStep(stepname);
                if (stepMeta != null) {
                    try {
                        RowMetaInterface row = transMeta.getPrevStepFields(stepMeta);

                        // Remember these fields...
                        for (int i = 0; i < row.size(); i++) {
                            inputFields.put(row.getValueMeta(i).getName(), Integer.valueOf(i));
                        }

                        setComboBoxes();
                    } catch (KettleException e) {
                        logError(BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"));
                    }
                }
            }
        };
        new Thread(runnable).start();

        // Some buttons
        wOK = new Button(shell, SWT.PUSH);
        wOK.setText(BaseMessages.getString(PKG, "System.Button.OK")); //$NON-NLS-1$
        wGet = new Button(shell, SWT.PUSH);
        wGet.setText(BaseMessages.getString(PKG, "System.Button.GetFields")); //$NON-NLS-1$
        wCancel = new Button(shell, SWT.PUSH);
        wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel")); //$NON-NLS-1$

        setButtonPositions(new Button[] { wOK, wCancel, wGet }, margin, wFields);

        // Add listeners
        lsCancel = new Listener() {
            public void handleEvent(Event e) {
                cancel();
            }
        };
        lsGet = new Listener() {
            public void handleEvent(Event e) {
                get();
            }
        };
        lsOK = new Listener() {
            public void handleEvent(Event e) {
                ok();
            }
        };

        wCancel.addListener(SWT.Selection, lsCancel);
        wGet.addListener(SWT.Selection, lsGet);
        wOK.addListener(SWT.Selection, lsOK);

        // pressing ENTER in the step name field acts like clicking OK
        lsDef = new SelectionAdapter() {
            public void widgetDefaultSelected(SelectionEvent e) {
                ok();
            }
        };

        wStepname.addSelectionListener(lsDef);

        // Detect X or ALT-F4 or something that kills this window...
        shell.addShellListener(new ShellAdapter() {
            public void shellClosed(ShellEvent e) {
                cancel();
            }
        });

        // Set the shell size, based upon previous time...
        setSize();

        getData();
        input.setChanged(changed);

        shell.open();
        while (!shell.isDisposed()) {
            if (!display.readAndDispatch())
                display.sleep();
        }
        return stepname;
    }

    protected void setComboBoxes() {
        // Something was changed in the row.
        //
        final Map<String, Integer> fields = new HashMap<String, Integer>();

        // Add the currentMeta fields...
fields.putAll(inputFields); Set<String> keySet = fields.keySet(); List<String> entries = new ArrayList<String>(keySet); String fieldNames[] = (String[]) entries.toArray(new String[entries.size()]); Const.sortStrings(fieldNames); colinf[0].setComboValues(fieldNames); } /** * Copy information from the meta-data input to the dialog fields. */ public void getData() { wStepname.setText(stepname); for (int i=0;i<input.getFieldName().length;i++) { TableItem item = wFields.table.getItem(i); String name = input.getFieldName()[i]; String type = input.getFieldValue()[i]; if (name!=null) item.setText(1, name); if (type!=null) item.setText(2, type); } wFields.setRowNums(); wFields.optWidth(true); wStepname.selectAll(); } private void cancel() { stepname=null; input.setChanged(changed); dispose(); } private void ok() { if (Const.isEmpty(wStepname.getText())) return; stepname = wStepname.getText(); // return value //Table table = wFields.table; int count = wFields.nrNonEmpty(); input.allocate(count); for (int i=0;i<count;i++) { TableItem item = wFields.getNonEmpty(i); input.getFieldName()[i] = item.getText(1); input.getFieldValue()[i] = item.getText(2); } dispose(); } private void get() { try { RowMetaInterface r = transMeta.getPrevStepFields(stepname); if (r!=null && !r.isEmpty()) { BaseStepDialog.getFieldsFromPrevious(r, wFields, 1, new int[] { 1 }, null, -1, -1, null); } } catch(KettleException ke) { new ErrorDialog(shell, BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Title"), BaseMessages.getString(PKG, "System.Dialog.GetFieldsFailed.Message"), ke); //$NON-NLS-1$ //$NON-NLS-2$ } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.lucene.internal.filesystem;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;

import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TemporaryFolder;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.test.junit.categories.LuceneTest;

/**
 * Unit tests for the Lucene {@code FileSystem} abstraction backed by a plain
 * {@link ConcurrentHashMap} standing in for the file-and-chunk region.
 * Covers read/write, seek, clone, rename/delete, partial-rename recovery,
 * export, and stats accounting.
 */
@Category({LuceneTest.class})
public class FileSystemJUnitTest {

  // A write size smaller than one chunk.
  private static final int SMALL_CHUNK = 523;
  // A write size spanning several chunks (5 MB + 33 bytes).
  private static final int LARGE_CHUNK = 1024 * 1024 * 5 + 33;

  private FileSystem system;
  private Random rand = new Random();
  // NOTE(review): raw ConcurrentHashMap — FileSystem presumably expects a
  // Map<?, ?> of file/chunk keys; the raw type is kept to match production usage.
  private ConcurrentHashMap fileAndChunkRegion;

  @Rule
  public TemporaryFolder tempFolderRule = new TemporaryFolder();

  private FileSystemStats fileSystemStats;

  /** Fresh in-memory region and mocked stats before every test. */
  @Before
  public void setUp() {
    fileAndChunkRegion = new ConcurrentHashMap();
    fileSystemStats = mock(FileSystemStats.class);
    system = new FileSystem(fileAndChunkRegion, fileSystemStats);
  }

  /**
   * A test of reading and writing to a file.
   */
  @Test
  public void testReadWriteBytes() throws Exception {
    long start = System.currentTimeMillis();

    File file1 = system.createFile("testFile1");

    assertEquals(0, file1.getLength());

    OutputStream outputStream1 = file1.getOutputStream();

    // Write some random data. Make sure it fills several chunks
    outputStream1.write(2);
    byte[] data = new byte[LARGE_CHUNK];
    rand.nextBytes(data);
    outputStream1.write(data);
    outputStream1.write(44);
    outputStream1.close();

    assertEquals(2 + LARGE_CHUNK, file1.getLength());
    assertTrue(file1.getModified() >= start);

    // Append to the file with a new outputstream
    OutputStream outputStream2 = file1.getOutputStream();
    outputStream2.write(123);
    byte[] data2 = new byte[SMALL_CHUNK];
    rand.nextBytes(data2);
    outputStream2.write(data2);
    outputStream2.close();

    assertEquals(3 + LARGE_CHUNK + SMALL_CHUNK, file1.getLength());

    // Make sure we can read all of the data back and it matches
    InputStream is = file1.getInputStream();

    assertEquals(2, is.read());
    byte[] resultData = new byte[LARGE_CHUNK];
    assertEquals(LARGE_CHUNK, is.read(resultData));
    assertArrayEquals(data, resultData);
    assertEquals(44, is.read());
    assertEquals(123, is.read());

    // Test read to an offset
    Arrays.fill(resultData, (byte) 0);
    assertEquals(SMALL_CHUNK, is.read(resultData, 50, SMALL_CHUNK));

    // Make sure the data read matches
    byte[] expectedData = new byte[LARGE_CHUNK];
    Arrays.fill(expectedData, (byte) 0);
    System.arraycopy(data2, 0, expectedData, 50, data2.length);
    assertArrayEquals(expectedData, resultData);

    assertEquals(-1, is.read());
    assertEquals(-1, is.read(data));
    is.close();

    // Test the skip interface
    is = file1.getInputStream();
    is.skip(LARGE_CHUNK + 3);

    Arrays.fill(resultData, (byte) 0);
    assertEquals(SMALL_CHUNK, is.read(resultData));

    Arrays.fill(expectedData, (byte) 0);
    System.arraycopy(data2, 0, expectedData, 0, data2.length);
    assertArrayEquals(expectedData, resultData);

    assertEquals(-1, is.read());
  }

  /**
   * A test of cloning a a FileInputStream. The clone should start from where the original was
   * positioned, but they should not hurt each other.
   */
  @Test
  public void testCloneReader() throws Exception {
    File file1 = system.createFile("testFile1");

    byte[] data = writeRandomBytes(file1);

    SeekableInputStream in = file1.getInputStream();

    // Read to partway through the file
    byte[] results1 = new byte[data.length];
    in.read(results1, 0, SMALL_CHUNK);

    // Clone the input stream. Both copies should
    // now be positioned partway through the file.
    SeekableInputStream in2 = in.clone();

    byte[] results2 = new byte[data.length];

    // Fill in the beginning of results2 with the data that it missed
    // to make testing easier.
    System.arraycopy(data, 0, results2, 0, SMALL_CHUNK);

    // Read the rest of the file with both copies
    in2.read(results2, SMALL_CHUNK, data.length);
    in.read(results1, SMALL_CHUNK, data.length);

    // Both readers should have started from the same place
    // and copied the rest of the data from the file
    assertArrayEquals(data, results1);
    assertArrayEquals(data, results2);
  }

  /**
   * A test that skip can jump to the correct position in the stream
   */
  @Test
  public void testSeek() throws Exception {
    File file = system.createFile("testFile1");
    ByteArrayOutputStream expected = new ByteArrayOutputStream();
    byte[] data = new byte[SMALL_CHUNK];
    // Write multiple times to the file with a lot of small chunks
    // until we are just past the first chunk boundary.
    while (expected.size() < FileSystem.CHUNK_SIZE + 1) {
      rand.nextBytes(data);
      expected.write(data);
      writeBytes(file, data);
    }

    byte[] expectedBytes = expected.toByteArray();
    assertContents(expectedBytes, file);

    // Assert that there are only 2 chunks in the system, since we wrote just
    // past the end of the first chunk.
    assertEquals(2, numberOfChunks(fileAndChunkRegion));

    SeekableInputStream in = file.getInputStream();

    // Seek to several positions in the first chunk
    checkByte(5, in, expectedBytes);
    checkByte(50, in, expectedBytes);
    checkByte(103, in, expectedBytes);
    checkByte(1, in, expectedBytes);

    // Seek back and forth between chunks
    checkByte(FileSystem.CHUNK_SIZE + 2, in, expectedBytes);
    checkByte(23, in, expectedBytes);
    checkByte(FileSystem.CHUNK_SIZE + 10, in, expectedBytes);
    checkByte(1023, in, expectedBytes);

    // Read the remaining data after a seek
    in.seek(10);

    byte[] results = new byte[expectedBytes.length];
    // Fill in the initial 10 bytes with the expected value
    System.arraycopy(expectedBytes, 0, results, 0, 10);

    assertEquals(results.length - 10, in.read(results, 10, results.length - 10));
    assertEquals(-1, in.read());
    assertArrayEquals(expectedBytes, results);
  }

  /** Seeks to position {@code i} and asserts the byte read matches the expectation. */
  private void checkByte(int i, SeekableInputStream in, byte[] expectedBytes) throws IOException {
    in.seek(i);
    byte result = (byte) in.read();

    assertEquals(expectedBytes[i], result);
  }

  /**
   * Test basic file operations - rename, delete, listFiles.
   */
  @Test
  public void testFileOperations() throws Exception {
    String name1 = "testFile1";
    File file1 = system.createFile(name1);
    byte[] file1Data = writeRandomBytes(file1);

    String name2 = "testFile2";
    File file2 = system.createFile(name2);
    byte[] file2Data = writeRandomBytes(file2);

    file1 = system.getFile(name1);
    file2 = system.getFile(name2);

    assertEquals(Arrays.asList(name1, name2), system.listFileNames());
    assertContents(file1Data, file1);
    assertContents(file2Data, file2);

    // Renaming onto an existing file must fail and change nothing.
    try {
      system.renameFile(name1, name2);
      fail("Should have received an exception");
    } catch (IOException expected) {
    }

    assertEquals(Arrays.asList(name1, name2), system.listFileNames());
    assertContents(file1Data, file1);
    assertContents(file2Data, file2);

    String name3 = "testFile3";
    system.renameFile(name1, name3);

    File file3 = system.getFile(name3);

    assertEquals(Arrays.asList(name3, name2), system.listFileNames());
    assertContents(file1Data, file3);
    assertContents(file2Data, file2);

    system.deleteFile(name2);

    assertEquals(Arrays.asList(name3), system.listFileNames());

    system.renameFile(name3, name2);

    assertEquals(Arrays.asList(name2), system.listFileNames());

    file2 = system.getFile(name2);
    assertContents(file1Data, file2);
  }

  /**
   * Test what happens if you have an unclosed stream and you create a new file.
   */
  @Test
  public void testUnclosedStreamSmallFile() throws Exception {
    doUnclosedStream(SMALL_CHUNK);
  }

  /**
   * Test what happens if you have an unclosed stream and you create a new file.
   */
  @Test
  public void testUnclosedStreamLargeFile() throws Exception {
    doUnclosedStream(LARGE_CHUNK);
  }

  /**
   * Writes {@code size} bytes without closing the stream, then re-opens the
   * file system and verifies whatever data is visible is a consistent prefix.
   */
  private void doUnclosedStream(int size) throws IOException {
    String name1 = "testFile1";
    File file1 = system.createFile(name1);
    byte[] bytes = getRandomBytes(size);
    file1.getOutputStream().write(bytes);

    FileSystem system2 = new FileSystem(fileAndChunkRegion, fileSystemStats);

    File file = system2.getFile(name1);

    // Unflushed data may be missing, but the length must never exceed what was written.
    assertTrue(file.getLength() <= bytes.length);
    long length = file.getLength();
    byte[] results = new byte[bytes.length];
    if (length == 0) {
      assertEquals(-1, file.getInputStream().read(results));
      assertEquals(0, numberOfChunks(fileAndChunkRegion));
    } else {
      // Make sure the amount of data we can read matches the length
      assertEquals(length, file.getInputStream().read(results));
      if (length != bytes.length) {
        // Zero the tail so the prefix comparison below is exact.
        Arrays.fill(bytes, (int) length, bytes.length, (byte) 0);
      }
      assertArrayEquals(bytes, results);
    }
  }

  /**
   * Test what happens a file rename is aborted in the middle due to the a cache closed exception.
   * The next member that uses those files should be able to clean up after the partial rename.
   */
  @Test
  public void testPartialRename() throws Exception {
    final CountOperations countOperations = new CountOperations();
    // Create a couple of mock regions where we count the operations
    // that happen to them. We will then use this to abort the rename
    // in the middle.
    ConcurrentHashMap spyFileAndChunkRegion =
        mock(ConcurrentHashMap.class, new SpyWrapper(countOperations, fileAndChunkRegion));

    system = new FileSystem(spyFileAndChunkRegion, fileSystemStats);

    String name = "file";
    File file = system.createFile(name);

    ByteArrayOutputStream expected = new ByteArrayOutputStream();

    // Make sure the file has a lot of chunks
    for (int i = 0; i < 10; i++) {
      expected.write(writeRandomBytes(file));
    }

    String name2 = "file2";

    countOperations.reset();

    system.renameFile(name, name2);

    // Right now the number of operations is 4.. except if run through a debugger...
    // NOTE(review): this count is coupled to FileSystem's internal rename
    // implementation; if rename changes, the threshold math below must be revisited.
    assertTrue(4 <= countOperations.count);

    // This number of operations during a rename actually needs to get to the "putIfAbsent" for the
    // Assertion to be correct. Right now the number of operations is actually 3 so the limit needs
    // to be 3...
    countOperations.after((int) Math.ceil(countOperations.count / 2.0 + 1), new Runnable() {
      @Override
      public void run() {
        throw new CacheClosedException();
      }
    });

    String name3 = "file3";

    countOperations.reset();
    try {
      system.renameFile(name2, name3);
      fail("should have seen an error");
    } catch (CacheClosedException expectedException) {
    }

    system = new FileSystem(fileAndChunkRegion, fileSystemStats);

    // This is not the ideal behavior. We are left
    // with two duplicate files. However, we will still
    // verify that neither file is corrupted.
    assertEquals(2, system.listFileNames().size());

    File sourceFile = system.getFile(name2);
    File destFile = system.getFile(name3);

    byte[] expectedBytes = expected.toByteArray();
    assertContents(expectedBytes, sourceFile);
    assertContents(expectedBytes, destFile);
  }

  /** Exports all files to a temp directory and verifies names and contents. */
  @Test
  public void testExport() throws IOException {
    String name1 = "testFile1";
    File file1 = system.createFile(name1);
    byte[] file1Data = writeRandomBytes(file1);

    String name2 = "testFile2";
    File file2 = system.createFile(name2);
    byte[] file2Data = writeRandomBytes(file2);

    java.io.File parentDir = tempFolderRule.getRoot();
    system.export(parentDir);

    String[] foundFiles = parentDir.list();
    Arrays.sort(foundFiles);
    assertArrayEquals(new String[] {"testFile1", "testFile2"}, foundFiles);

    assertExportedFileContents(file1Data, new java.io.File(parentDir, "testFile1"));
    assertExportedFileContents(file2Data, new java.io.File(parentDir, "testFile2"));
  }

  /** Stats: creating a file bumps the file-creates counter once. */
  @Test
  public void testIncrementFileCreates() throws IOException {
    File file = system.createFile("file");
    verify(fileSystemStats).incFileCreates(1);
  }

  /** Stats: deleting a file bumps the file-deletes counter once. */
  @Test
  public void testIncrementFileDeletes() throws IOException {
    File file = system.createFile("file");

    system.deleteFile("file");
    verify(fileSystemStats).incFileDeletes(1);
  }

  /** Stats: renaming a file bumps the file-renames counter once. */
  @Test
  public void testIncrementFileRenames() throws IOException {
    File file = system.createFile("file");

    system.renameFile("file", "dest");
    verify(fileSystemStats).incFileRenames(1);
  }

  /** Stats: creating a temporary file bumps the temporary-file-creates counter once. */
  @Test
  public void testIncrementTemporaryFileCreates() throws IOException {
    File file = system.createTemporaryFile("file");
    verify(fileSystemStats).incTemporaryFileCreates(1);
  }

  /** Stats: the sum of all incWrittenBytes calls equals the bytes written. */
  @Test
  public void testIncrementWrittenBytes() throws IOException {
    File file = system.createTemporaryFile("file");
    final byte[] bytes = writeRandomBytes(file);
    ArgumentCaptor<Integer> captor = ArgumentCaptor.forClass(Integer.class);
    verify(fileSystemStats, atLeast(1)).incWrittenBytes(captor.capture());
    final int actualByteCount = captor.getAllValues().stream().mapToInt(Integer::intValue).sum();
    assertEquals(bytes.length, actualByteCount);
  }

  /** Stats: the sum of all incReadBytes calls equals the bytes read. */
  @Test
  public void testIncrementReadBytes() throws IOException {
    File file = system.createTemporaryFile("file");
    final byte[] bytes = writeRandomBytes(file);
    file.getInputStream().read(bytes);
    ArgumentCaptor<Integer> captor = ArgumentCaptor.forClass(Integer.class);
    verify(fileSystemStats, atLeast(1)).incReadBytes(captor.capture());
    final int actualByteCount = captor.getAllValues().stream().mapToInt(Integer::intValue).sum();
    assertEquals(bytes.length, actualByteCount);
  }

  /**
   * Deleting the stale source name of a rename must not destroy the chunks
   * now owned by the destination file. The spy forces remove() to report the
   * old file as still present to simulate the race.
   */
  @Test
  public void testDeletePossiblyRenamedFileDoesNotDestroyChunks() throws Exception {
    ConcurrentHashMap spyFileRegion = Mockito.spy(fileAndChunkRegion);
    system = new FileSystem(spyFileRegion, fileSystemStats);

    String sourceFileName = "sourceFile";
    File file1 = system.createFile(sourceFileName);

    byte[] data = writeRandomBytes(file1);

    Mockito.doReturn(file1).when(spyFileRegion).remove(any());

    String destFileName = "destFile";

    system.renameFile(sourceFileName, destFileName);

    File destFile = system.getFile(destFileName);

    assertNotNull(system.getFile(sourceFileName));

    system.deleteFile(sourceFileName);

    assertNotNull(system.getChunk(destFile, 0));
  }

  /** Asserts an exported on-disk file holds exactly the expected bytes. */
  private void assertExportedFileContents(final byte[] expected, final java.io.File exportedFile)
      throws IOException {
    byte[] actual = Files.readAllBytes(exportedFile.toPath());
    assertArrayEquals(expected, actual);
  }

  /** Asserts the in-system file holds exactly the expected bytes and hits EOF after. */
  private void assertContents(byte[] data, File file) throws IOException {
    assertEquals(data.length, file.getLength());
    InputStream is = file.getInputStream();

    if (data.length == 0) {
      assertEquals(-1, is.read());
      return;
    }

    byte[] results = new byte[data.length];
    assertEquals(file.getLength(), is.read(results));
    assertEquals(-1, is.read());
    is.close();

    assertArrayEquals(data, results);
  }

  /** Writes a random-length payload to the file and returns it for later comparison. */
  private byte[] writeRandomBytes(File file) throws IOException {
    byte[] file1Data = getRandomBytes();
    writeBytes(file, file1Data);
    return file1Data;
  }

  /** Writes the payload through a fresh output stream and closes it (flushes chunks). */
  private void writeBytes(File file, byte[] data) throws IOException {
    OutputStream outputStream = file.getOutputStream();
    outputStream.write(data);
    outputStream.close();
  }

  /** Random payload between SMALL_CHUNK and SMALL_CHUNK + LARGE_CHUNK - 1 bytes. */
  private byte[] getRandomBytes() {
    return getRandomBytes(rand.nextInt(LARGE_CHUNK) + SMALL_CHUNK);
  }

  private byte[] getRandomBytes(int length) {
    byte[] data = new byte[length];
    rand.nextBytes(data);

    return data;
  }

  /** Counts only the chunk entries (not file metadata entries) in the region map. */
  private long numberOfChunks(Map map) {
    return map.keySet().parallelStream().filter(k -> (k instanceof ChunkKey)).count();
  }

  /**
   * A wrapper around an object that will also invoke a callback before applying an operation.
   *
   * This is essentially like Mockito.spy(), except that it allows the implementation of a default
   * answer for all operations.
   *
   * To use, do this Mockito.mock(Interface, new SpyWrapper(Answer, o)
   */
  private static class SpyWrapper implements Answer<Object> {
    private final CountOperations countOperations;
    private Object region;

    private SpyWrapper(CountOperations countOperations, Object region) {
      this.countOperations = countOperations;
      this.region = region;
    }

    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      // Count (and possibly abort via the limit action) before delegating
      // the real call to the wrapped region.
      countOperations.answer(invocation);
      Method m = invocation.getMethod();
      return m.invoke(region, invocation.getArguments());
    }
  }

  /**
   * Mockito Answer that counts invocations and, once a configured limit is
   * exceeded, runs a caller-supplied action (used to inject CacheClosedException).
   */
  private static class CountOperations implements Answer {
    public int count;
    private int limit = Integer.MAX_VALUE;
    private Runnable limitAction;

    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      count++;
      if (count > limit) {
        limitAction.run();
      }
      return null;
    }

    public void reset() {
      count = 0;
    }

    /** Arms the action to fire on every invocation after the i-th. */
    public void after(int i, Runnable runnable) {
      limit = i;
      limitAction = runnable;
    }
  }
}
/*
 * Copyright (c) 2015 TextGlass
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

import java.util.ArrayList;
import java.util.List;

import org.codehaus.jackson.JsonNode;

/**
 * A token-matching pattern parsed from a JSON definition. A pattern matches a
 * list of tokens according to its {@link PatternType} and competes with other
 * matching patterns via a rank derived from its {@link RankType} and rankValue.
 */
public class Pattern {
	private final String patternId;
	private final PatternType patternType;
	private final List<String> patternTokens;
	private final RankType rankType;
	private final int rankValue;

	// Simple: any one token matches. SimpleAnd: all tokens must match.
	// SimpleOrderedAnd: all tokens must match, in order.
	private enum PatternType { Simple, SimpleAnd, SimpleOrderedAnd };

	// Strong patterns always win (fixed rank); Weak get a large bonus; None use rankValue only.
	private enum RankType { Strong, Weak, None };

	/**
	 * Parses and validates a pattern from its JSON definition.
	 *
	 * @param json object with patternId, patternType, patternTokens, rankType
	 *             and optional rankValue
	 * @throws Exception when a required field is missing or a constraint is violated
	 */
	public Pattern(JsonNode json) throws Exception {
		//PARSE PATTERN JSON

		// NOTE(review): patternId is checked directly on the JsonNode while the
		// other fields use the JsonFile helpers — presumably equivalent; confirm
		// against JsonFile.empty()/get() semantics.
		if(json.get("patternId") == null || json.get("patternId").asText().isEmpty()) {
			throw new Exception("patternId not found");
		}

		patternId = json.get("patternId").asText();

		if(JsonFile.empty(json, "patternType")) {
			throw new Exception("patternType not found in " + patternId);
		}

		if(!JsonFile.get(json, "patternTokens").isArray()) {
			throw new Exception("patternTokens array not found in " + patternId);
		}

		if(JsonFile.empty(json, "rankType")) {
			throw new Exception("rankType not found in " + patternId);
		}

		// valueOf throws IllegalArgumentException for unknown names.
		patternType = PatternType.valueOf(json.get("patternType").asText());
		rankType = RankType.valueOf(json.get("rankType").asText());

		// rankValue is optional and defaults to 0.
		String rankValueStr = (json.get("rankValue") != null ? json.get("rankValue").asText() : "0");
		rankValue = Integer.parseInt(rankValueStr);

		if(rankValue > 1000 || rankValue < -1000) {
			throw new Exception("Invalid rankValue in " + patternId + ": " + rankValue);
		}

		if(rankType.equals(RankType.Strong) && rankValue != 0) {
			throw new Exception("Strong patterns cannot have a rankValue");
		}

		patternTokens = new ArrayList<>();

		for(int i = 0; i < json.get("patternTokens").size(); i++) {
			JsonNode patternToken = json.get("patternTokens").get(i);

			if(patternToken.asText().isEmpty()) {
				throw new Exception("Empty patternToken in " + patternId);
			}

			patternTokens.add(patternToken.asText());
		}

		if(patternTokens.isEmpty()) {
			throw new Exception("No patternTokens found for: " + patternId);
		}

		// AND-style patterns with a single token are degenerate and rejected.
		if(patternTokens.size() < 2 &&
				(patternType.equals(PatternType.SimpleAnd) || patternType.equals(PatternType.SimpleOrderedAnd))) {
			throw new Exception("patternType " + patternType + " requires more than 1 patternToken: " + patternTokens);
		}
	}

	/**
	 * Tests whether this pattern matches the given tokens.
	 *
	 * Simple: true if any pattern token is present. SimpleAnd: true only if
	 * every pattern token is present. SimpleOrderedAnd: every token present,
	 * each at a strictly later first occurrence than the previous one.
	 *
	 * NOTE(review): indexOf() finds only the FIRST occurrence, so ordered
	 * matching with repeated tokens in matchedTokens may reject a valid
	 * ordering — presumably acceptable for this token model; confirm.
	 */
	//IS PATTERN VALID FOR MATCHED TOKENS
	public boolean isValid(List<String> matchedTokens) {
		int lastFound = -1;

		for(String patternToken : patternTokens) {
			int found = matchedTokens.indexOf(patternToken);

			if(found == -1 && (isSimpleAnd() || isSimpleOrderedAnd())) {
				return false;
			}

			if(found >= 0 && isSimple()) {
				return true;
			}

			if(isSimpleOrderedAnd()) {
				if(found <= lastFound) {
					return false;
				} else {
					lastFound = found;
				}
			}
		}

		// Simple patterns that reach here matched nothing; AND patterns matched everything.
		if(isSimple()) {
			return false;
		} else {
			return true;
		}
	}

	/**
	 * Rank used to choose between multiple matching patterns: Strong patterns
	 * share a fixed top rank, Weak patterns get a +100000 bonus over None.
	 */
	//RANK COMPARED TO OTHER PATTERNS
	public long getRank() {
		long rank = rankValue;

		if(isWeak()) {
			rank += 100000;
		} else if(isStrong()) {
			return 10000000;
		}

		return rank;
	}

	/**
	 * Total character length of the pattern tokens found in matchedTokens;
	 * used as a tie-breaker when ranking.
	 */
	//LENGTH OF MATCHED TOKENS, FOR RANKING
	public long getMatchedLength(List<String> matchedTokens) {
		int length = 0;

		for(String patternToken : patternTokens) {
			int found = matchedTokens.indexOf(patternToken);

			if(found >= 0) {
				length += patternToken.length();
			}
		}

		return length;
	}

	@Override
	public String toString() {
		return patternId;
	}

	/** patternId annotated with its rank and matched length, for debug output. */
	public String toStringRank(List<String> matchedTokens) {
		return patternId + "(" + getRank() + "," + getMatchedLength(matchedTokens) + ")";
	}

	/** Verbose debug form including type, tokens and ranking. */
	public String toStringFull() {
		return "patternId: " + patternId + ", patternType: " + patternType + patternTokens +
				", rankType: " + rankType + ":" + rankValue;
	}

	public String getPatternId() {
		return patternId;
	}

	public List<String> getPatternTokens() {
		return patternTokens;
	}

	public boolean isStrong() {
		return rankType.equals(RankType.Strong);
	}

	public boolean isWeak() {
		return rankType.equals(RankType.Weak);
	}

	public boolean isNone() {
		return rankType.equals(RankType.None);
	}

	public boolean isSimple() {
		return patternType.equals(PatternType.Simple);
	}

	public boolean isSimpleAnd() {
		return patternType.equals(PatternType.SimpleAnd);
	}

	public boolean isSimpleOrderedAnd() {
		return patternType.equals(PatternType.SimpleOrderedAnd);
	}
}
/*
 * Copyright 2006-2012 ICEsoft Technologies Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS
 * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language
 * governing permissions and limitations under the License.
 */
package org.icepdf.core.pobjects.graphics;

import org.icepdf.core.util.Defs;
import org.icepdf.core.util.Library;

import java.awt.*;
import java.util.Hashtable;

/**
 * Device CMYK colour space definitions.  The primary purpose of this colour
 * space is to convert cmyk colours to rgb.  No ICC profile is used in this
 * process and the generated rgb colour is just an approximation.
 * <p>
 * Note: throughout this class the component array is in reversed order:
 * f[3] = cyan, f[2] = magenta, f[1] = yellow, f[0] = black.
 */
public class DeviceCMYK extends PColorSpace {

    // Divisor used to soften the black (K) channel contribution in
    // alternative2(); configurable via the system property below.
    public static double cmykBlackRatio;

    static {
        // Ratio applied to the black component during CMYK->RGB conversion,
        // overridable with -Dorg.icepdf.core.color.cmyk.black (default 3.0).
        cmykBlackRatio = Defs.sysPropertyDouble("org.icepdf.core.color.cmyk.black",
                3.0f);
    }

    DeviceCMYK(Library l, Hashtable h) {
        super(l, h);
    }

    /** DeviceCMYK has four components: C, M, Y and K. */
    public int getNumComponents() {
        return 4;
    }

    /**
     * Converts a 4 component cmyk colour to rgb.  Without a valid ICC colour
     * profile this is just an approximation.
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    public Color getColor(float[] f) {
        // alternative2() is the conversion currently judged closest to Acrobat;
        // the other private methods below are retained for comparison only.
        return alternative2(f);
    }

    /**
     * Ah yes the many possible ways to go from cmyk to rgb.  Everybody has
     * an opinion but no one has the solution that is 100%
     */

    /**
     * Naive additive conversion: r = 1 - min(1, c + k), etc.
     * Currently unused; kept as a reference implementation.
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color alternative1(float[] f) {
        float c = f[3];
        float m = f[2];
        float y = f[1];
        float k = f[0];

        float r = 1.0f - Math.min(1.0f, c + k);
        float g = 1.0f - Math.min(1.0f, m + k);
        float b = 1.0f - Math.min(1.0f, y + k);

        return new Color(r, g, b);
    }

    /**
     * Undercolour-removal style conversion: r = 1 - min(1, c*(1-k) + k), etc.
     * Currently unused; kept as a reference implementation.
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color alternative3(float[] f) {
        float c = f[3];
        float m = f[2];
        float y = f[1];
        float k = f[0];

        float r = 1.0f - Math.min(1.0f, (c * (1 - k)) + k);
        float g = 1.0f - Math.min(1.0f, (m * (1 - k)) + k);
        float b = 1.0f - Math.min(1.0f, (y * (1 - k)) + k);

        return new Color(r, g, b);
    }

    /**
     * Auto cad color model.  Currently unused; kept as a reference implementation.
     * var R=Math.round((1-C)*(1-K)*255);
     * var B=Math.round((1-Y)*(1-K)*255);
     * var G=Math.round((1-M)*(1-K)*255);
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color getAutoCadColor(float[] f) {
        float c = f[3];
        float m = f[2];
        float y = f[1];
        float k = f[0];

        int red = Math.round((1.0f - c) * (1.0f - k) * 255);
        int blue = Math.round((1.0f - y) * (1.0f - k) * 255);
        int green = Math.round((1.0f - m) * (1.0f - k) * 255);

        return new Color(red, green, blue);
    }

    /**
     * GNU Ghost Script algorithm or so they say.  Currently unused; kept as a
     * reference implementation.
     * <p/>
     * rgb[0] = colors * (255 - cyan)/255;
     * rgb[1] = colors * (255 - magenta)/255;
     * rgb[2] = colors * (255 - yellow)/255;
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color getGhostColor(float[] f) {
        int cyan = (int) (f[3] * 255);
        int magenta = (int) (f[2] * 255);
        int yellow = (int) (f[1] * 255);
        int black = (int) (f[0] * 255);
        float colors = 255 - black;
        float[] rgb = new float[3];
        rgb[0] = colors * (255 - cyan) / 255;
        rgb[1] = colors * (255 - magenta) / 255;
        rgb[2] = colors * (255 - yellow) / 255;

        return new Color((int) rgb[0], (int) rgb[1], (int) rgb[2]);
    }

    /**
     * Adobe photo shop algorithm or so they say.  Currently unused; kept as a
     * reference implementation.
     * <p/>
     * cyan = Math.min(255, cyan + black); //black is from K
     * magenta = Math.min(255, magenta + black);
     * yellow = Math.min(255, yellow + black);
     * rgb[0] = 255 - cyan;
     * rgb[1] = 255 - magenta;
     * rgb[2] = 255 - yellow;
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color getAdobeColor(float[] f) {
        int cyan = (int) (f[3] * 255);
        int magenta = (int) (f[2] * 255);
        int yellow = (int) (f[1] * 255);
        int black = (int) (f[0] * 255);
        cyan = Math.min(255, cyan + black); //black is from K
        magenta = Math.min(255, magenta + black);
        yellow = Math.min(255, yellow + black);
        int[] rgb = new int[3];
        rgb[0] = 255 - cyan;
        rgb[1] = 255 - magenta;
        rgb[2] = 255 - yellow;

        return new Color(rgb[0], rgb[1], rgb[2]);
    }

    /**
     * Current runner for conversion that looks closest to acrobat.
     * The algorithm is a little expensive but it does the best approximation.
     * Mixes K into each channel (softened by cmykBlackRatio), then weights the
     * eight C/M/Y corner colours to produce the RGB output.
     *
     * @param f 4 component values of the cmyk, assumes components between
     *          0.0 and 1.0
     * @return valid rgb colour object.
     */
    private static Color alternative2(float[] f) {
        float inCyan = f[3];
        float inMagenta = f[2];
        float inYellow = f[1];
        float inBlack = f[0];

        // soften the amount of black, but exclude explicit black colorant
        // (pure K with C=M=Y=0 is left untouched).
        if (inCyan != 0 && inMagenta != 0 && inYellow != 0) {
            inBlack /= cmykBlackRatio;
        }

        double c, m, y, aw, ac, am, ay, ar, ag, ab;
        // Fold black into each channel, clipped to [0, 1].
        c = clip(0.0, 1.0, inCyan + inBlack);
        m = clip(0.0, 1.0, inMagenta + inBlack);
        y = clip(0.0, 1.0, inYellow + inBlack);
        // Weights of the 8 corner colours: white, cyan, magenta, yellow,
        // red (m*y), green (c*y), blue (c*m).
        aw = (1 - c) * (1 - m) * (1 - y);
        ac = c * (1 - m) * (1 - y);
        am = (1 - c) * m * (1 - y);
        ay = (1 - c) * (1 - m) * y;
        ar = (1 - c) * m * y;
        ag = c * (1 - m) * y;
        ab = c * m * (1 - y);

        // Empirical per-corner contributions to each RGB channel.
        float outRed = (float) clip(0.0, 1.0, aw + 0.9137 * am + 0.9961 * ay + 0.9882 * ar);
        float outGreen = (float) clip(0.0, 1.0, aw + 0.6196 * ac + ay + 0.5176 * ag);
        float outBlue = (float) clip(0.0, 1.0,
                aw + 0.7804 * ac + 0.5412 * am + 0.0667 * ar + 0.2118 * ag + 0.4863 * ab);

        return new Color(outRed, outGreen, outBlue);
    }

    /**
     * Clips the value according to the specified floor and ceiling.
     *
     * @param floor   floor value of clip
     * @param ceiling ceiling value of clip
     * @param value   value to clip.
     * @return clipped value.
     */
    private static double clip(double floor, double ceiling, double value) {
        if (value < floor) {
            value = floor;
        }
        if (value > ceiling) {
            value = ceiling;
        }
        return value;
    }
}
/* * Copyright 2015 Karlsruhe Institute of Technology. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.kit.dama.ui.admin.login; import com.vaadin.ui.Alignment; import com.vaadin.ui.CustomComponent; import com.vaadin.ui.GridLayout; import com.vaadin.ui.PasswordField; import com.vaadin.ui.TextField; import com.vaadin.ui.VerticalLayout; import edu.kit.dama.authorization.entities.GroupId; import edu.kit.dama.authorization.entities.IAuthorizationContext; import edu.kit.dama.authorization.entities.Role; import edu.kit.dama.authorization.entities.UserId; import edu.kit.dama.authorization.entities.impl.AuthorizationContext; import edu.kit.dama.authorization.exceptions.EntityAlreadyExistsException; import edu.kit.dama.authorization.exceptions.EntityNotFoundException; import edu.kit.dama.authorization.exceptions.UnauthorizedAccessAttemptException; import edu.kit.dama.authorization.services.administration.GroupServiceLocal; import edu.kit.dama.authorization.services.administration.UserServiceLocal; import edu.kit.dama.ui.admin.exception.UserRegistrationException; import edu.kit.dama.mdm.core.IMetaDataManager; import edu.kit.dama.mdm.core.MetaDataManagement; import edu.kit.dama.mdm.admin.ServiceAccessToken; import edu.kit.dama.mdm.admin.exception.SecretEncryptionException; import edu.kit.dama.mdm.base.UserData; import edu.kit.dama.ui.admin.exception.RegistrationAbortedException; import edu.kit.dama.ui.commons.util.UIUtils7; import edu.kit.dama.util.Constants; import 
java.util.Date;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Registration form for creating a new user account. Collects screen name,
 * first/last name, email and password, validates the input and performs the
 * multi-step registration (authorization entry, group membership, user
 * metadata, login token).
 *
 * @author mf6319
 */
public class RegistrationFormView extends CustomComponent {

  private static final Logger LOGGER = LoggerFactory.getLogger(RegistrationFormView.class);
  // All inputs are required and limited to 3..255 characters by the factory.
  private final TextField screenName = UIUtils7.factoryTextField("Screen Name", "Insert your screen name", "100%", true, 3, 255);
  private final TextField firstName = UIUtils7.factoryTextField("First Name", "Insert your first name", "100%", true, 3, 255);
  private final TextField lastName = UIUtils7.factoryTextField("Last Name", "Insert your last name", "100%", true, 3, 255);
  private final TextField email = UIUtils7.factoryTextField("Email", "Insert your email", "100%", true, 3, 255);
  private final PasswordField password = UIUtils7.factoryPasswordField("Password", "100%", true, 3, 255);
  private final PasswordField passwordVerify = UIUtils7.factoryPasswordField("Repeat Password", "100%", true, 3, 255);
  private VerticalLayout mainLayout;
  private GridLayout registerForm;

  /**
   * Default constructor. Builds the form layout and sets it as the
   * composition root.
   */
  public RegistrationFormView() {
    initComponent();
    setCompositionRoot(mainLayout);
  }

  /**
   * Pre-fill the form fields from an existing user data template, e.g. data
   * obtained from an external login provider. A null template clears all
   * fields.
   *
   * @param template the user data used to fill the form, may be null.
   */
  public void loadFromUserData(UserData template) {
    if (template != null) {
      screenName.setValue(template.getDistinguishedName());
      firstName.setValue(template.getFirstName());
      lastName.setValue(template.getLastName());
      email.setValue(template.getEmail());
    } else {
      screenName.setValue("");
      firstName.setValue("");
      lastName.setValue("");
      email.setValue("");
    }
  }

  /**
   * Initialize the user interface components.
   */
  private void initComponent() {
    screenName.setRequired(true);
    email.setRequired(true);
    email.setNullRepresentation("");
    registerForm = new UIUtils7.GridLayoutBuilder(2, 5).
            addComponent(screenName, 0, 0, 2, 1).
            addComponent(lastName, 0, 1, 1, 1).addComponent(firstName, 1, 1, 1, 1).
            addComponent(email, 0, 2, 2, 1).
            addComponent(password, 0, 3, 2, 1).
            addComponent(passwordVerify, 0, 4, 2, 1).
            getLayout();
    registerForm.setSpacing(true);
    registerForm.setMargin(false);
    registerForm.setSizeFull();
    mainLayout = new VerticalLayout(registerForm);
    mainLayout.setComponentAlignment(registerForm, Alignment.MIDDLE_CENTER);
    mainLayout.setSizeFull();
  }

  /**
   * Do the user registration including all validation steps.
   *
   * @return the registered user if registration was successful.
   *
   * @throws UserRegistrationException If validating the provided information
   * fails.
   * @throws RegistrationAbortedException If the registration fails after a
   * user has been created.
   */
  public UserData register() throws UserRegistrationException, RegistrationAbortedException {
    if (!UIUtils7.validate(registerForm) || email.getValue() == null || screenName.getValue() == null) {
      //something is wrong...show error
      throw new UserRegistrationException("Form validation failed. Please check all fields again.");
    }
    UserId userScreenName = new UserId(screenName.getValue());
    String userMail = email.getValue().trim();
    String userFirstName = firstName.getValue().trim();
    String userLastName = lastName.getValue().trim();
    // Obtain the raw password values first; trimming is deferred until after
    // the null check below. (Previously .trim() was called immediately, which
    // would have thrown an NPE before the null check could ever fire.)
    String userPassword1 = password.getValue();
    String userPassword2 = passwordVerify.getValue();

    IAuthorizationContext ctx = AuthorizationContext.factorySystemContext();
    UserData template = new UserData();
    template.setEmail(userMail);
    IMetaDataManager mdm = MetaDataManagement.getMetaDataManagement().getMetaDataManager();
    mdm.setAuthorizationContext(ctx);
    try {
      try {
        // Query-by-example: reject registration if the email is already taken.
        List<UserData> existing = mdm.find(template, template);
        if (!existing.isEmpty()) {
          throw new UserRegistrationException("The email address " + userMail + " is already used.");
        }
      } catch (UnauthorizedAccessAttemptException ex) {
        // Preserve the cause so the failure can be diagnosed later.
        throw new UserRegistrationException("Failed to check email " + userMail + ". Please try again later.", ex);
      }

      if (userPassword1 == null || userPassword2 == null) {
        throw new UserRegistrationException("Please provide a password and its confirmation.");
      }
      userPassword1 = userPassword1.trim();
      userPassword2 = userPassword2.trim();
      if (!userPassword1.equals(userPassword2)) {
        throw new UserRegistrationException("Passwords are not equal.");
      }

      //save authorization information
      try {
        UserServiceLocal.getSingleton().register(userScreenName, Role.MANAGER, ctx);
      } catch (UnauthorizedAccessAttemptException e) {
        // Log the screen name value, not the TextField component itself
        // (the component's toString() is meaningless in a log message).
        LOGGER.error("Failed to register new user for screenName '" + userScreenName.getStringRepresentation() + "'.", e);
        throw new UserRegistrationException("Failed to register new user for screenName " + userScreenName.getStringRepresentation() + ".", e);
      } catch (EntityAlreadyExistsException e) {
        LOGGER.error("User with screenName '" + userScreenName.getStringRepresentation() + "' already exists.", e);
        throw new UserRegistrationException("User with screenName " + userScreenName + " already exists.");
      }

      // Add the new user to the default USERS group. Failure here is fatal and
      // triggers the rollback-by-disabling path in handleRegistrationError().
      try {
        GroupServiceLocal.getSingleton().addUser(new GroupId(Constants.USERS_GROUP_ID), userScreenName, Role.MEMBER, ctx);
      } catch (EntityNotFoundException | UnauthorizedAccessAttemptException | EntityAlreadyExistsException e) {
        LOGGER.error("Failed to add user with screen name '" + userScreenName + "' to group USERS.", e);
        handleRegistrationError(userScreenName, "Failed to add user with screenName '" + userScreenName + "' to default group 'USERS'.");
      }

      //save user metadata
      template.setFirstName(userFirstName);
      template.setLastName(userLastName);
      template.setValidFrom(new Date());
      template.setDistinguishedName(userScreenName.getStringRepresentation());
      try {
        template = mdm.save(template);
      } catch (UnauthorizedAccessAttemptException e) {
        LOGGER.error("Failed to create userdata entity for user with screen name '" + userScreenName + "'.", e);
        handleRegistrationError(userScreenName, "Failed to save user properties.");
      }

      //save password entity
      try {
        ServiceAccessToken token = new ServiceAccessToken(userScreenName.getStringRepresentation(), Constants.MAIN_LOGIN_SERVICE_ID);
        token.setTokenKey(userMail);
        token.setSecret(userPassword1);
        mdm.save(token);
      } catch (UnauthorizedAccessAttemptException | SecretEncryptionException e) {
        LOGGER.error("Failed to store service access token for user with screen name '" + userScreenName + "'.", e);
        handleRegistrationError(userScreenName, "Failed to save login information.");
      }
    } finally {
      mdm.close();
    }
    return template;
  }

  /**
   * Handle fatal registration errors which occur after an authorization
   * userId has been added. In this case, the according user is disabled and
   * an exception is thrown. To re-activate the user, manual steps must be
   * performed.
   *
   * @param pUserId The userId for which the registration failed.
   * @param pErrorMessage The error message that will be part of the thrown
   * exception
   *
   * @throws RegistrationAbortedException The exception containing
   * pErrorMessage.
   */
  private void handleRegistrationError(UserId pUserId, String pErrorMessage) throws RegistrationAbortedException {
    try {
      UserServiceLocal.getSingleton().setRoleRestriction(pUserId, Role.NO_ACCESS, AuthorizationContext.factorySystemContext());
    } catch (UnauthorizedAccessAttemptException | EntityNotFoundException ex) {
      LOGGER.error("Failed to disable user with id '" + pUserId + "'.", ex);
      throw new RegistrationAbortedException(pErrorMessage, ex);
    }
  }
}
package com.mikepenz.lollipopshowcase;

import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ResolveInfo;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.util.Pair;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.CompoundButton;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.Switch;

import com.mikepenz.aboutlibraries.Libs;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.iconics.typeface.FontAwesome;
import com.mikepenz.lollipopshowcase.adapter.ApplicationAdapter;
import com.mikepenz.lollipopshowcase.entity.AppInfo;
import com.mikepenz.lollipopshowcase.itemanimator.CustomItemAnimator;
import com.mikepenz.lollipopshowcase.util.UploadHelper;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

/**
 * Main screen: lists all launchable applications in a RecyclerView, with a
 * navigation drawer (auto-upload toggle, open-source licenses) and a FAB that
 * uploads the collected app info via {@link UploadHelper}.
 */
public class MainActivity extends ActionBarActivity {

    // Backing list filled by InitializeApplicationsTask and shared with the adapter/upload helper.
    private List<AppInfo> applicationList = new ArrayList<AppInfo>();
    private ApplicationAdapter mAdapter;
    private ImageButton mFabButton;
    private RecyclerView mRecyclerView;
    private SwipeRefreshLayout mSwipeRefreshLayout;
    private ProgressBar mProgressBar;

    // static so the running upload task survives configuration changes; its progress dialog is
    // re-attached in onCreate(). NOTE(review): a static reference to a task created with an
    // Activity context can leak the Activity — confirm UploadHelper clears it on completion.
    private static UploadHelper.UploadComponentInfoTask uploadComponentInfoTask = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        //supportRequestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Set explode animation when enter and exit the activity
        //Utils.configureWindowEnterExitTransition(getWindow());

        // Handle Toolbar (must be installed before getSupportActionBar() is used below)
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);

        // Handle DrawerLayout
        DrawerLayout mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer);

        // Handle ActionBarDrawerToggle
        ActionBarDrawerToggle actionBarDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, toolbar, R.string.drawer_open, R.string.drawer_close);
        actionBarDrawerToggle.syncState();

        // Handle different Drawer States :D
        mDrawerLayout.setDrawerListener(actionBarDrawerToggle);

        // Handle DrawerList
        LinearLayout mDrawerList = (LinearLayout) findViewById(R.id.drawerList);

        // Handle ProgressBar
        mProgressBar = (ProgressBar) findViewById(R.id.progressBar);

        // Init DrawerElems NOTE Just don't do this in a live app :D
        // (preferences are read/written directly from the UI thread here)
        final SharedPreferences pref = getSharedPreferences("com.mikepenz.applicationreader", 0);
        ((Switch) mDrawerList.findViewById(R.id.drawer_autoupload)).setChecked(pref.getBoolean("autouploadenabled", false));
        ((Switch) mDrawerList.findViewById(R.id.drawer_autoupload)).setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                // Persist the auto-upload preference as soon as it is toggled.
                SharedPreferences.Editor editor = pref.edit();
                editor.putBoolean("autouploadenabled", isChecked);
                editor.apply();
            }
        });
        mDrawerList.findViewById(R.id.drawer_opensource).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Show the AboutLibraries open-source licenses screen.
                new Libs.Builder()
                        .withFields(R.string.class.getFields())
                        .withVersionShown(true)
                        .withLicenseShown(true)
                        .withActivityTitle(getString(R.string.drawer_opensource))
                        .withActivityTheme(R.style.AboutTheme)
                        .start(MainActivity.this);
            }
        });
        ((ImageView) mDrawerList.findViewById(R.id.drawer_opensource_icon)).setImageDrawable(new IconicsDrawable(this, FontAwesome.Icon.faw_github).colorRes(R.color.secondary).actionBarSize());

        // Fab Button
        mFabButton = (ImageButton) findViewById(R.id.fab_button);
        mFabButton.setImageDrawable(new IconicsDrawable(this, FontAwesome.Icon.faw_upload).color(Color.WHITE).actionBarSize());
        mFabButton.setOnClickListener(fabClickListener);
        Utils.configureFab(mFabButton);

        mRecyclerView = (RecyclerView) findViewById(R.id.list);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(this));
        mRecyclerView.setItemAnimator(new CustomItemAnimator());
        //mRecyclerView.setItemAnimator(new ReboundItemAnimator());

        mAdapter = new ApplicationAdapter(new ArrayList<AppInfo>(), R.layout.row_application, MainActivity.this);
        mRecyclerView.setAdapter(mAdapter);

        mSwipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_container);
        mSwipeRefreshLayout.setColorSchemeColors(getResources().getColor(R.color.theme_accent));
        mSwipeRefreshLayout.setRefreshing(true);
        mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Pull-to-refresh re-runs the full application scan.
                new InitializeApplicationsTask().execute();
            }
        });

        // Kick off the initial application scan.
        new InitializeApplicationsTask().execute();

        // Re-attach the progress dialog of a still-running upload after recreation.
        if (savedInstanceState != null) {
            if (uploadComponentInfoTask != null) {
                if (uploadComponentInfoTask.isRunning) {
                    uploadComponentInfoTask.showProgress(this);
                }
            }
        }

        //show progress
        mRecyclerView.setVisibility(View.GONE);
        mProgressBar.setVisibility(View.VISIBLE);
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
    }

    // Starts uploading the collected app list; the task is kept in a static field
    // so it can outlive this Activity instance (see uploadComponentInfoTask).
    View.OnClickListener fabClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            uploadComponentInfoTask = UploadHelper.getInstance(MainActivity.this, applicationList).uploadAll();
        }
    };

    /**
     * Launches the detail screen with a shared-element transition for the FAB
     * and the tapped app icon.
     *
     * @param appInfo the selected application
     * @param appIcon the icon view used as shared transition element
     */
    public void animateActivity(AppInfo appInfo, View appIcon) {
        Intent i = new Intent(this, DetailActivity.class);
        i.putExtra("appInfo", appInfo.getComponentName());

        ActivityOptionsCompat transitionActivityOptions = ActivityOptionsCompat.makeSceneTransitionAnimation(this, Pair.create((View) mFabButton, "fab"), Pair.create(appIcon, "appIcon"));
        startActivity(i, transitionActivityOptions.toBundle());
    }

    /**
     * Background task that queries all launcher activities, sorts them and
     * pre-loads their icons before handing the list to the adapter.
     */
    private class InitializeApplicationsTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected void onPreExecute() {
            mAdapter.clearApplications();
            super.onPreExecute();
        }

        @Override
        protected Void doInBackground(Void... params) {
            applicationList.clear();

            //Query the applications
            final Intent mainIntent = new Intent(Intent.ACTION_MAIN, null);
            mainIntent.addCategory(Intent.CATEGORY_LAUNCHER);
            List<ResolveInfo> ril = getPackageManager().queryIntentActivities(mainIntent, 0);
            for (ResolveInfo ri : ril) {
                applicationList.add(new AppInfo(MainActivity.this, ri));
            }
            Collections.sort(applicationList);

            for (AppInfo appInfo : applicationList) {
                //load icons before shown. so the list is smoother
                appInfo.getIcon();
            }
            return null;
        }

        @Override
        protected void onPostExecute(Void result) {
            //handle visibility
            mRecyclerView.setVisibility(View.VISIBLE);
            mProgressBar.setVisibility(View.GONE);

            //set data for list
            mAdapter.addApplications(applicationList);

            mSwipeRefreshLayout.setRefreshing(false);
            super.onPostExecute(result);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.java.util.emitter.core; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Predicate; import com.google.common.primitives.Ints; import io.netty.handler.codec.http.HttpHeaders; import org.apache.druid.concurrent.ConcurrentAwaitableCounter; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.RetryUtils; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.lifecycle.LifecycleStart; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.common.logger.Logger; import org.asynchttpclient.AsyncHttpClient; import org.asynchttpclient.ListenableFuture; import org.asynchttpclient.RequestBuilder; import org.asynchttpclient.Response; import javax.annotation.Nullable; import java.io.Closeable; import java.io.Flushable; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayDeque; import 
java.util.concurrent.ConcurrentLinkedDeque;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.LockSupport;
import java.util.zip.GZIPOutputStream;

/**
 * Emitter that serializes events to JSON, packs them into batch buffers and
 * POSTs them asynchronously to a recipient URL on a dedicated
 * {@code EmittingThread}. Events too large for batching are sent individually
 * via a separate large-event queue.
 */
public class HttpPostEmitter implements Flushable, Closeable, Emitter
{
  private static final int MAX_EVENT_SIZE = 1023 * 1024; // Set max size slightly less than 1M to allow for metadata

  // Upper bound on per-buffer send attempts before the buffer is parked in failedBuffers.
  private static final int MAX_SEND_RETRIES = 3;

  /**
   * Threshold of the size of {@link #buffersToEmit} when switch from using {@link
   * BaseHttpEmittingConfig#getHttpTimeoutAllowanceFactor()} to {@link #EQUILIBRIUM_ALLOWANCE_FACTOR}
   */
  private static final int EMIT_QUEUE_THRESHOLD_1 = 5;

  /**
   * Threshold of the size of {@link #buffersToEmit} when switch from using {@link #EQUILIBRIUM_ALLOWANCE_FACTOR}
   * to {@link #TIGHT_ALLOWANCE_FACTOR}.
   */
  private static final int EMIT_QUEUE_THRESHOLD_2 = 10;

  /**
   * 0.9 is to give room for unexpected latency or time out not being respected rigorously.
   */
  private static final double EQUILIBRIUM_ALLOWANCE_FACTOR = 0.9;

  private static final double TIGHT_ALLOWANCE_FACTOR = 0.5;

  /**
   * Used in {@link EmittingThread#emitLargeEvents()} to ensure fair emitting of both large events and batched events.
   */
  private final AtomicReference<Object> concurrentBatch = new AtomicReference<>();

  private final ConcurrentLinkedDeque<Batch> buffersToEmit = new ConcurrentLinkedDeque<>();
  /**
   * See {@link #approximateBuffersToReuseCount}
   */
  private final AtomicInteger approximateBuffersToEmitCount = new AtomicInteger();
  /**
   * See {@link #approximateBuffersToReuseCount}
   */
  private final AtomicLong approximateEventsToEmitCount = new AtomicLong();

  private final ConcurrentLinkedQueue<byte[]> largeEventsToEmit = new ConcurrentLinkedQueue<>();
  /**
   * See {@link #approximateBuffersToReuseCount}
   */
  private final AtomicInteger approximateLargeEventsToEmitCount = new AtomicInteger();

  // Incremented once per batch that is emitted, failed or dropped; flush() awaits it.
  private final ConcurrentAwaitableCounter emittedBatchCounter = new ConcurrentAwaitableCounter();

  private final EmittingThread emittingThread;
  private final AtomicLong totalEmittedEvents = new AtomicLong();
  private final AtomicInteger allocatedBuffers = new AtomicInteger();
  private final AtomicInteger droppedBuffers = new AtomicInteger();

  // volatile: written by the sealing thread, read by EmittingThread for timeout computation.
  private volatile long lastBatchFillTimeMillis;
  private final ConcurrentTimeCounter batchFillingTimeCounter = new ConcurrentTimeCounter();

  private final Object startLock = new Object();
  private final CountDownLatch startLatch = new CountDownLatch(1);
  private boolean running = false;

  public HttpPostEmitter(HttpEmitterConfig config, AsyncHttpClient client)
  {
    this(config, client, new ObjectMapper());
  }

  public HttpPostEmitter(HttpEmitterConfig config, AsyncHttpClient client, ObjectMapper jsonMapper)
  {
    batchingStrategy = config.getBatchingStrategy();
    final int batchOverhead = batchingStrategy.batchStartLength() + batchingStrategy.batchEndLength();
    // A single max-size event plus batch framing must always fit into one batch buffer.
    Preconditions.checkArgument(
        config.getMaxBatchSize() >= MAX_EVENT_SIZE + batchOverhead,
        StringUtils.format(
            "maxBatchSize must be greater than MAX_EVENT_SIZE[%,d] + overhead[%,d].",
            MAX_EVENT_SIZE,
            batchOverhead
        )
    );
    this.config = config;
    this.bufferSize = config.getMaxBatchSize();
    this.maxBufferWatermark = bufferSize - batchingStrategy.batchEndLength();
    // Chosen so that if event size < largeEventThreshold, at least 2 events could fit the standard buffer.
    this.largeEventThreshold = (bufferSize - batchOverhead - batchingStrategy.separatorLength()) / 2;
    this.client = client;
    this.jsonMapper = jsonMapper;
    try {
      this.url = new URL(config.getRecipientBaseUrl()).toString();
    }
    catch (MalformedURLException e) {
      throw new ISE(e, "Bad URL: %s", config.getRecipientBaseUrl());
    }
    emittingThread = new EmittingThread(config);
    long firstBatchNumber = 1;
    concurrentBatch.set(new Batch(this, acquireBuffer(), firstBatchNumber));
    // lastBatchFillTimeMillis must not be 0, minHttpTimeoutMillis could be.
    lastBatchFillTimeMillis = Math.max(config.minHttpTimeoutMillis, 1);
  }

  @Override
  @LifecycleStart
  public void start()
  {
    synchronized (startLock) {
      if (!running) {
        // startLatch.getCount() == 0 means the emitter was started and then closed; restart is not supported.
        if (startLatch.getCount() == 0) {
          throw new IllegalStateException("Already started.");
        }
        running = true;
        startLatch.countDown();
        emittingThread.start();
      }
    }
  }

  private void awaitStarted()
  {
    try {
      if (!startLatch.await(1, TimeUnit.SECONDS)) {
        throw new RejectedExecutionException("Service is not started.");
      }
      if (isTerminated()) {
        throw new RejectedExecutionException("Service is closed.");
      }
    }
    catch (InterruptedException e) {
      log.debug("Interrupted waiting for start");
      Thread.currentThread().interrupt();
      throw new RuntimeException(e);
    }
  }

  private boolean isTerminated()
  {
    // concurrentBatch is set to null exactly once, in close() / needsToShutdown().
    return concurrentBatch.get() == null;
  }

  @Override
  public void emit(Event event)
  {
    emitAndReturnBatch(event);
  }

  @VisibleForTesting
  @Nullable
  Batch emitAndReturnBatch(Event event)
  {
    awaitStarted();

    final byte[] eventBytes = eventToBytes(event);

    // Events larger than MAX_EVENT_SIZE are dropped (only a 1 KiB prefix is logged).
    if (eventBytes.length > MAX_EVENT_SIZE) {
      log.error(
          "Event too large to emit (%,d > %,d): %s ...",
          eventBytes.length,
          MAX_EVENT_SIZE,
          StringUtils.fromUtf8(ByteBuffer.wrap(eventBytes), 1024)
      );
      return null;
    }

    // Events that don't batch well go through the dedicated large-event path.
    if (eventBytes.length > largeEventThreshold) {
      writeLargeEvent(eventBytes);
      return null;
    }

    while (true) {
      Object batchObj =
concurrentBatch.get();
      // A Long value is a sentinel left by a failed onSealExclusive(); try to recover.
      if (batchObj instanceof Long) {
        tryRecoverCurrentBatch((Long) batchObj);
        continue;
      }
      if (batchObj == null) {
        throw new RejectedExecutionException("Service is closed.");
      }
      Batch batch = (Batch) batchObj;
      if (batch.tryAddEvent(eventBytes)) {
        return batch;
      } else {
        log.debug("Failed to emit an event in batch [%s]", batch);
      }
      // Spin loop, until the thread calling onSealExclusive() updates the concurrentBatch. This update becomes visible
      // eventually, because concurrentBatch.get() is a volatile read.
    }
  }

  private byte[] eventToBytes(Event event)
  {
    try {
      return jsonMapper.writeValueAsBytes(event);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  private void writeLargeEvent(byte[] eventBytes)
  {
    // It's better to drop the oldest, not latest event, but dropping the oldest is not easy to implement, because
    // LARGE_EVENTS_STOP could be added into the queue concurrently. So just not adding the latest event.
    // >, not >=, because largeEventsToEmit could contain LARGE_EVENTS_STOP
    if (approximateBuffersToEmitCount.get() > config.getBatchQueueSizeLimit()) {
      log.error(
          "largeEventsToEmit queue size reached the limit [%d], dropping the latest large event",
          config.getBatchQueueSizeLimit()
      );
    } else {
      largeEventsToEmit.add(eventBytes);
      approximateBuffersToEmitCount.incrementAndGet();
      approximateLargeEventsToEmitCount.incrementAndGet();
      approximateEventsToEmitCount.incrementAndGet();
    }
    wakeUpEmittingThread();
  }

  /**
   * Called from {@link Batch} only once for each Batch in existence.
   */
  void onSealExclusive(Batch batch, long elapsedTimeMillis)
  {
    try {
      doOnSealExclusive(batch, elapsedTimeMillis);
    }
    catch (Throwable t) {
      try {
        // Leave the failed batch's number (a Long) as a recovery marker for tryRecoverCurrentBatch().
        if (!concurrentBatch.compareAndSet(batch, batch.batchNumber)) {
          log.error("Unexpected failure to set currentBatch to the failed Batch.batchNumber");
        }
        log.error(t, "Serious error during onSealExclusive(), set currentBatch to the failed Batch.batchNumber");
      }
      catch (Throwable t2) {
        t.addSuppressed(t2);
      }
      throw t;
    }
  }

  private void doOnSealExclusive(Batch batch, long elapsedTimeMillis)
  {
    batchFillingTimeCounter.add((int) Math.max(elapsedTimeMillis, 0));
    if (elapsedTimeMillis > 0) {
      // If elapsedTimeMillis is 0 or negative, it's likely because System.currentTimeMillis() is not monotonic, so not
      // accounting this time for determining batch sending timeout.
      lastBatchFillTimeMillis = elapsedTimeMillis;
    }
    addBatchToEmitQueue(batch);
    wakeUpEmittingThread();
    if (!isTerminated()) {
      long nextBatchNumber = ConcurrentAwaitableCounter.nextCount(batch.batchNumber);
      byte[] newBuffer = acquireBuffer();
      if (!concurrentBatch.compareAndSet(batch, new Batch(this, newBuffer, nextBatchNumber))) {
        buffersToReuse.add(newBuffer);
        // If compareAndSet failed, the service should be closed concurrently, i. e. we expect isTerminated() = true.
        // If we don't see this, there should be some bug in HttpPostEmitter.
        Preconditions.checkState(isTerminated());
      }
    }
  }

  private void tryRecoverCurrentBatch(Long failedBatchNumber)
  {
    log.info("Trying to recover currentBatch");
    long nextBatchNumber = ConcurrentAwaitableCounter.nextCount(failedBatchNumber);
    byte[] newBuffer = acquireBuffer();
    if (concurrentBatch.compareAndSet(failedBatchNumber, new Batch(this, newBuffer, nextBatchNumber))) {
      log.info("Successfully recovered currentBatch");
    } else {
      // It's normal, a concurrent thread could succeed to recover first.
buffersToReuse.add(newBuffer);
    }
  }

  private void addBatchToEmitQueue(Batch batch)
  {
    // Drop the oldest pending batch first if the queue is at its limit.
    limitBuffersToEmitSize();
    buffersToEmit.addLast(batch);
    approximateBuffersToEmitCount.incrementAndGet();
    approximateEventsToEmitCount.addAndGet(batch.eventCount.get());
  }

  private void limitBuffersToEmitSize()
  {
    if (approximateBuffersToEmitCount.get() >= config.getBatchQueueSizeLimit()) {
      Batch droppedBatch = buffersToEmit.pollFirst();
      if (droppedBatch != null) {
        batchFinalized();
        approximateBuffersToEmitCount.decrementAndGet();
        approximateEventsToEmitCount.addAndGet(-droppedBatch.eventCount.get());
        droppedBuffers.incrementAndGet();
        log.error(
            "buffersToEmit queue size reached the limit [%d], dropping the oldest buffer to emit",
            config.getBatchQueueSizeLimit()
        );
      }
    }
  }

  private void batchFinalized()
  {
    // Notify HttpPostEmitter.flush(), that the batch is emitted, or failed, or dropped.
    emittedBatchCounter.increment();
  }

  private Batch pollBatchFromEmitQueue()
  {
    Batch result = buffersToEmit.pollFirst();
    if (result == null) {
      return null;
    }
    approximateBuffersToEmitCount.decrementAndGet();
    approximateEventsToEmitCount.addAndGet(-result.eventCount.get());
    return result;
  }

  private void wakeUpEmittingThread()
  {
    LockSupport.unpark(emittingThread);
  }

  @Override
  public void flush() throws IOException
  {
    awaitStarted();
    Object batchObj = concurrentBatch.get();
    if (batchObj instanceof Batch) {
      flush((Batch) batchObj);
    }
  }

  private void flush(Batch batch) throws IOException
  {
    if (batch == null) {
      return;
    }
    batch.seal();
    try {
      // This check doesn't always awaits for this exact batch to be emitted, because another batch could be dropped
      // from the queue ahead of this one, in limitBuffersToEmitSize(). But there is no better way currently to wait for
      // the exact batch, and it's not that important.
      emittedBatchCounter.awaitCount(batch.batchNumber, config.getFlushTimeOut(), TimeUnit.MILLISECONDS);
    }
    catch (TimeoutException e) {
      String message = StringUtils.format("Timed out after [%d] millis during flushing", config.getFlushTimeOut());
      throw new IOException(message, e);
    }
    catch (InterruptedException e) {
      log.debug("Thread Interrupted");
      Thread.currentThread().interrupt();
      throw new IOException("Thread Interrupted while flushing", e);
    }
  }

  @Override
  @LifecycleStop
  public void close() throws IOException
  {
    synchronized (startLock) {
      if (running) {
        running = false;
        // Setting concurrentBatch to null marks the emitter as terminated for all other methods.
        Object lastBatch = concurrentBatch.getAndSet(null);
        if (lastBatch instanceof Batch) {
          flush((Batch) lastBatch);
        }
        emittingThread.shuttingDown = true;
        // EmittingThread is interrupted after the last batch is flushed.
        emittingThread.interrupt();
      }
    }
  }

  @Override
  public String toString()
  {
    return "HttpPostEmitter{" +
           "config=" + config +
           '}';
  }

  /**
   * Single daemon thread that drains the large-event queue, the batch queue
   * and previously failed buffers, parking between rounds.
   */
  private class EmittingThread extends Thread
  {
    private final ArrayDeque<FailedBuffer> failedBuffers = new ArrayDeque<>();
    /**
     * "Approximate", because not exactly synchronized with {@link #failedBuffers} updates. Not using size() on
     * {@link #failedBuffers}, because access to it is not synchronized, while approximateFailedBuffersCount is queried
     * not within EmittingThread.
     */
    private final AtomicInteger approximateFailedBuffersCount = new AtomicInteger();

    private final ConcurrentTimeCounter successfulSendingTimeCounter = new ConcurrentTimeCounter();
    private final ConcurrentTimeCounter failedSendingTimeCounter = new ConcurrentTimeCounter();

    /**
     * Cache the exception. Need an exception because {@link RetryUtils} operates only via exceptions.
     */
    private final TimeoutException timeoutLessThanMinimumException;

    private boolean shuttingDown = false;
    private ZeroCopyByteArrayOutputStream gzipBaos;

    EmittingThread(HttpEmitterConfig config)
    {
      super("HttpPostEmitter-" + INSTANCE_COUNTER.incrementAndGet());
      setDaemon(true);
      timeoutLessThanMinimumException = new TimeoutException(
          "Timeout less than minimum [" + config.getMinHttpTimeoutMillis() + "] ms."
      );
      // Avoid showing and writing a nonsense, misleading stack trace in logs (the exception is cached, so its
      // capture site would be this constructor, not the failing send).
      timeoutLessThanMinimumException.setStackTrace(new StackTraceElement[]{});
    }

    @Override
    public void run()
    {
      while (true) {
        boolean needsToShutdown = needsToShutdown();
        try {
          emitLargeEvents();
          emitBatches();
          tryEmitOneFailedBuffer();

          if (needsToShutdown) {
            tryEmitAndDrainAllFailedBuffers();
            // Make GC life easier
            drainBuffersToReuse();
            return;
          }
        }
        catch (Throwable t) {
          log.error(t, "Uncaught exception in EmittingThread.run()");
        }
        if (failedBuffers.isEmpty()) { // Waiting for 1/2 of config.getFlushMillis() in order to flush events not more than 50% later than specified.
          // If nanos=0 parkNanos() doesn't wait at all, which we don't want.
          long waitNanos = Math.max(TimeUnit.MILLISECONDS.toNanos(config.getFlushMillis()) / 2, 1);
          LockSupport.parkNanos(HttpPostEmitter.this, waitNanos);
        }
      }
    }

    /**
     * Checks whether this thread should shut down (it was interrupted, or close() flagged {@link #shuttingDown}).
     * On shutdown, detaches and seals the current batch; otherwise seals it only if its flush deadline has passed.
     */
    private boolean needsToShutdown()
    {
      boolean needsToShutdown = Thread.interrupted() || shuttingDown;
      if (needsToShutdown) {
        Object lastBatch = concurrentBatch.getAndSet(null);
        if (lastBatch instanceof Batch) {
          ((Batch) lastBatch).seal();
        }
      } else {
        Object batch = concurrentBatch.get();
        if (batch instanceof Batch) {
          ((Batch) batch).sealIfFlushNeeded();
        } else {
          // batch == null means that HttpPostEmitter is terminated. Batch object might also be a Long object if some
          // thread just failed with a serious error in onSealExclusive(). In this case we don't want to shutdown
          // the emitter thread.
          needsToShutdown = batch == null;
        }
      }
      return needsToShutdown;
    }

    /** Drains the emit queue, sending each batch in FIFO order. */
    private void emitBatches()
    {
      for (Batch batch; (batch = pollBatchFromEmitQueue()) != null; ) {
        emit(batch);
      }
    }

    /**
     * Sends a single sealed batch. On success its buffer is recycled; on failure the buffer is parked in
     * {@link #failedBuffers} for later retry. The batch is always finalized, so flush() waiters are released.
     */
    private void emit(final Batch batch)
    {
      // Awaits until all concurrent event writers finish copying their event bytes to the buffer. This call provides
      // memory visibility guarantees.
      batch.awaitEmittingAllowed();
      try {
        final int bufferWatermark = batch.getSealedBufferWatermark();
        if (bufferWatermark == 0) { // sealed while empty
          return;
        }
        int eventCount = batch.eventCount.get();
        log.debug(
            "Sending batch #%d to url[%s], event count[%d], bytes[%d]",
            batch.batchNumber,
            url,
            eventCount,
            bufferWatermark
        );
        int bufferEndOffset = batchingStrategy.writeBatchEnd(batch.buffer, bufferWatermark);

        if (sendWithRetries(batch.buffer, bufferEndOffset, eventCount, true)) {
          buffersToReuse.add(batch.buffer);
          approximateBuffersToReuseCount.incrementAndGet();
        } else {
          limitFailedBuffersSize();
          failedBuffers.addLast(new FailedBuffer(batch.buffer, bufferEndOffset, eventCount));
          approximateFailedBuffersCount.incrementAndGet();
        }
      }
      finally {
        batchFinalized();
      }
    }

    /** Bounds {@link #failedBuffers} by the same limit as the emit queue, dropping the oldest failed buffer. */
    private void limitFailedBuffersSize()
    {
      if (failedBuffers.size() >= config.getBatchQueueSizeLimit()) {
        failedBuffers.removeFirst();
        approximateFailedBuffersCount.decrementAndGet();
        droppedBuffers.incrementAndGet();
        log.error(
            "failedBuffers queue size reached the limit [%d], dropping the oldest failed buffer",
            config.getBatchQueueSizeLimit()
        );
      }
    }

    // Large events are compared by reference against the LARGE_EVENTS_STOP sentinel array on purpose.
    @SuppressWarnings("ArrayEquality")
    private void emitLargeEvents()
    {
      if (largeEventsToEmit.isEmpty()) {
        return;
      }
      // Don't try to emit large events until exhaustion, to avoid starvation of "normal" batches, if large event
      // posting rate is too high, though it should never happen in practice.
      largeEventsToEmit.add(LARGE_EVENTS_STOP);
      for (byte[] largeEvent; (largeEvent = largeEventsToEmit.poll()) != LARGE_EVENTS_STOP; ) {
        emitLargeEvent(largeEvent);
        approximateBuffersToEmitCount.decrementAndGet();
        approximateLargeEventsToEmitCount.decrementAndGet();
        approximateEventsToEmitCount.decrementAndGet();
      }
    }

    /** Wraps a single oversized event into its own batch buffer and sends it; failures go to {@link #failedBuffers}. */
    private void emitLargeEvent(byte[] eventBytes)
    {
      byte[] buffer = acquireBuffer();
      int bufferOffset = batchingStrategy.writeBatchStart(buffer);
      System.arraycopy(eventBytes, 0, buffer, bufferOffset, eventBytes.length);
      bufferOffset += eventBytes.length;
      bufferOffset = batchingStrategy.writeBatchEnd(buffer, bufferOffset);
      if (sendWithRetries(buffer, bufferOffset, 1, true)) {
        buffersToReuse.add(buffer);
        approximateBuffersToReuseCount.incrementAndGet();
      } else {
        limitFailedBuffersSize();
        failedBuffers.addLast(new FailedBuffer(buffer, bufferOffset, 1));
        approximateFailedBuffersCount.incrementAndGet();
      }
    }

    /** Attempts to re-send the oldest failed buffer, without a deadline; removes it from the queue on success. */
    private void tryEmitOneFailedBuffer()
    {
      FailedBuffer failedBuffer = failedBuffers.peekFirst();
      if (failedBuffer != null) {
        if (sendWithRetries(failedBuffer.buffer, failedBuffer.length, failedBuffer.eventCount, false)) {
          // Remove from the queue of failed buffer.
          failedBuffers.pollFirst();
          approximateFailedBuffersCount.decrementAndGet();
          // Don't add the failed buffer back to the buffersToReuse queue here, because in a situation when we were not
          // able to emit events for a while we don't have a way to discard buffers that were used to accumulate events
          // during that period, if they are added back to buffersToReuse. For instance it may result in having 100
          // buffers in rotation even if we need just 2.
        }
      }
    }

    /** Shutdown path: best-effort final send of every failed buffer, draining the queue regardless of outcome. */
    private void tryEmitAndDrainAllFailedBuffers()
    {
      for (FailedBuffer failedBuffer; (failedBuffer = failedBuffers.pollFirst()) != null; ) {
        sendWithRetries(failedBuffer.buffer, failedBuffer.length, failedBuffer.eventCount, false);
        approximateFailedBuffersCount.decrementAndGet();
      }
    }

    /**
     * Returns true if sent successfully.
     */
    private boolean sendWithRetries(final byte[] buffer, final int length, final int eventCount, boolean withTimeout)
    {
      // Deadline is derived from how long the batch took to fill; see computeTimeoutForSendRequestInMillis().
      long deadLineMillis = System.currentTimeMillis() + computeTimeoutForSendRequestInMillis(lastBatchFillTimeMillis);
      try {
        RetryUtils.retry(
            new RetryUtils.Task<Object>()
            {
              @Override
              public Void perform() throws Exception
              {
                send(buffer, length);
                return null;
              }
            },
            new Predicate<Throwable>()
            {
              @Override
              public boolean apply(Throwable e)
              {
                // Retry predicate: stop retrying once past the deadline (when withTimeout), on the cached
                // "timeout less than minimum" exception (compared by identity), or on interruption.
                if (withTimeout && deadLineMillis - System.currentTimeMillis() <= 0) { // overflow-aware
                  return false;
                }
                if (e == timeoutLessThanMinimumException) {
                  return false; // Doesn't make sense to retry, because the result will be the same.
                }
                return !(e instanceof InterruptedException);
              }
            },
            MAX_SEND_RETRIES
        );
        totalEmittedEvents.addAndGet(eventCount);
        return true;
      }
      catch (InterruptedException e) {
        return false;
      }
      catch (Exception e) {
        if (e == timeoutLessThanMinimumException) {
          log.debug(e, "Failed to send events to url[%s] with timeout less than minimum", config.getRecipientBaseUrl());
        } else {
          log.error(e, "Failed to send events to url[%s]", config.getRecipientBaseUrl());
        }
        return false;
      }
    }

    /**
     * Performs one HTTP POST of buffer[0..length) to the configured url, optionally gzipping the payload and
     * attaching basic-auth credentials. Waits for the response without a client-side timeout on the future (to avoid
     * double-sends); relies on the request timeout instead. Throws on any non-2xx response.
     */
    private void send(byte[] buffer, int length) throws Exception
    {
      long lastFillTimeMillis = HttpPostEmitter.this.lastBatchFillTimeMillis;
      final long timeoutMillis = computeTimeoutForSendRequestInMillis(lastFillTimeMillis);
      if (timeoutMillis < config.getMinHttpTimeoutMillis()) {
        throw timeoutLessThanMinimumException;
      }
      long sendingStartMs = System.currentTimeMillis();

      final RequestBuilder request = new RequestBuilder("POST");
      request.setUrl(url);
      byte[] payload;
      int payloadLength;
      ContentEncoding contentEncoding = config.getContentEncoding();
      if (contentEncoding != null) {
        switch (contentEncoding) {
          case GZIP:
            // acquireGzipOutputStream() resets and reuses gzipBaos, which backs the payload below.
            try (GZIPOutputStream gzipOutputStream = acquireGzipOutputStream(length)) {
              gzipOutputStream.write(buffer, 0, length);
            }
            payload = gzipBaos.getBuffer();
            payloadLength = gzipBaos.size();
            request.setHeader(HttpHeaders.Names.CONTENT_ENCODING, HttpHeaders.Values.GZIP);
            break;
          default:
            throw new ISE("Unsupported content encoding [%s]", contentEncoding.name());
        }
      } else {
        payload = buffer;
        payloadLength = length;
      }

      request.setHeader(HttpHeaders.Names.CONTENT_TYPE, "application/json");
      request.setHeader(HttpHeaders.Names.CONTENT_LENGTH, String.valueOf(payloadLength));
      request.setBody(ByteBuffer.wrap(payload, 0, payloadLength));

      if (config.getBasicAuthentication() != null) {
        // Credentials are stored as "user:password"; a missing ':' yields an empty password.
        final String[] parts = config.getBasicAuthentication().getPassword().split(":", 2);
        final String user = parts[0];
        final String password = parts.length > 1 ? parts[1] : "";
        String encoded = StringUtils.encodeBase64String((user + ':' + password).getBytes(StandardCharsets.UTF_8));
        request.setHeader(HttpHeaders.Names.AUTHORIZATION, "Basic " + encoded);
      }

      request.setRequestTimeout(Ints.saturatedCast(timeoutMillis));

      ListenableFuture<Response> future = client.executeRequest(request);
      Response response;
      try {
        // Don't use Future.get(timeout), because we want to avoid sending the same data twice, in case the send
        // succeeds finally, but after the timeout.
        response = future.get();
      }
      catch (ExecutionException e) {
        accountFailedSending(sendingStartMs);
        if (e.getCause() instanceof TimeoutException) {
          log.error(
              "Timing out emitter batch send, last batch fill time [%,d] ms, timeout [%,d] ms",
              lastFillTimeMillis,
              timeoutMillis
          );
        }
        throw e;
      }

      if (response.getStatusCode() == 413) {
        accountFailedSending(sendingStartMs);
        // NOTE(review): the message says "maxBatchSizeBatch" — presumably the config property is "maxBatchSize";
        // confirm against HttpEmitterConfig before changing the runtime string.
        throw new ISE(
            "Received HTTP status 413 from [%s]. Batch size of [%d] may be too large, " +
            "try adjusting maxBatchSizeBatch property",
            config.getRecipientBaseUrl(),
            config.getMaxBatchSize()
        );
      }

      if (response.getStatusCode() / 100 != 2) {
        accountFailedSending(sendingStartMs);
        throw new ISE(
            "Emissions of events not successful[%d: %s], with message[%s].",
            response.getStatusCode(),
            response.getStatusText(),
            response.getResponseBody(StandardCharsets.UTF_8).trim()
        );
      }

      accountSuccessfulSending(sendingStartMs);
    }

    /**
     * This method computes the timeout for sending a batch of events over HTTP, based on how much time it took to
     * populate that batch. The idea is that if it took X milliseconds to fill the batch, we couldn't wait for more than
     * X * {@link HttpEmitterConfig#httpTimeoutAllowanceFactor} milliseconds to send that data, because at the same time
     * the next batch is probably being filled with the same speed, so we have to keep up with the speed.
     *
     * Ideally it should use something like moving average instead of plain last batch fill time in order to accommodate
     * for emitting bursts, but it might be unnecessary because Druid application might not produce events in bursts.
     */
    private long computeTimeoutForSendRequestInMillis(long lastBatchFillTimeMillis)
    {
      int emitQueueSize = approximateBuffersToEmitCount.get();
      if (emitQueueSize < EMIT_QUEUE_THRESHOLD_1) {
        return (long) (lastBatchFillTimeMillis * config.httpTimeoutAllowanceFactor);
      }
      if (emitQueueSize < EMIT_QUEUE_THRESHOLD_2) {
        // The idea is to not let buffersToEmit queue to grow faster than we can emit buffers.
        return (long) (lastBatchFillTimeMillis * EQUILIBRIUM_ALLOWANCE_FACTOR);
      }
      // If buffersToEmit still grows, try to restrict even more
      return (long) (lastBatchFillTimeMillis * TIGHT_ALLOWANCE_FACTOR);
    }

    // Math.max(..., 0) guards against a clock going backwards between start and end of the send.
    private void accountSuccessfulSending(long sendingStartMs)
    {
      successfulSendingTimeCounter.add((int) Math.max(System.currentTimeMillis() - sendingStartMs, 0));
    }

    private void accountFailedSending(long sendingStartMs)
    {
      failedSendingTimeCounter.add((int) Math.max(System.currentTimeMillis() - sendingStartMs, 0));
    }

    /**
     * Returns a GZIPOutputStream (with syncFlush enabled) writing into the reusable {@link #gzipBaos},
     * allocating it lazily on first use and resetting it on subsequent calls. Only safe to call from this thread,
     * since gzipBaos is a plain field of the emitting thread.
     */
    GZIPOutputStream acquireGzipOutputStream(int length) throws IOException
    {
      if (gzipBaos == null) {
        gzipBaos = new ZeroCopyByteArrayOutputStream(length);
      } else {
        gzipBaos.reset();
      }
      return new GZIPOutputStream(gzipBaos, true);
    }
  }

  /** Immutable holder for a serialized batch whose send failed, kept in EmittingThread for later retry. */
  private static class FailedBuffer
  {
    final byte[] buffer;
    final int length;
    final int eventCount;

    private FailedBuffer(byte[] buffer, int length, int eventCount)
    {
      this.buffer = buffer;
      this.length = length;
      this.eventCount = eventCount;
    }
  }

  /** Takes a buffer from the reuse pool, or allocates a fresh one of {@code bufferSize} bytes. */
  private byte[] acquireBuffer()
  {
    byte[] buffer = buffersToReuse.poll();
    if (buffer == null) {
      buffer = new byte[bufferSize];
      allocatedBuffers.incrementAndGet();
    } else {
      approximateBuffersToReuseCount.decrementAndGet();
    }
    return buffer;
  }

  /** Empties the reuse pool (called on shutdown) so pooled buffers become garbage-collectable. */
  private void drainBuffersToReuse()
  {
    while (buffersToReuse.poll() != null) {
      approximateBuffersToReuseCount.decrementAndGet();
    }
  }

  /**
   * This and the following methods are public for external monitoring purposes.
   */
  public int getTotalAllocatedBuffers()
  {
    return allocatedBuffers.get();
  }

  public int getBuffersToEmit()
  {
    return approximateBuffersToEmitCount.get();
  }

  public int getBuffersToReuse()
  {
    return approximateBuffersToReuseCount.get();
  }

  public int getTotalFailedBuffers()
  {
    return emittingThread.approximateFailedBuffersCount.get();
  }

  public int getTotalDroppedBuffers()
  {
    return droppedBuffers.get();
  }

  public long getTotalEmittedEvents()
  {
    return totalEmittedEvents.get();
  }

  public long getEventsToEmit()
  {
    return approximateEventsToEmitCount.get();
  }

  public long getLargeEventsToEmit()
  {
    return approximateLargeEventsToEmitCount.get();
  }

  public ConcurrentTimeCounter getBatchFillingTimeCounter()
  {
    return batchFillingTimeCounter;
  }

  public ConcurrentTimeCounter getSuccessfulSendingTimeCounter()
  {
    return emittingThread.successfulSendingTimeCounter;
  }

  public ConcurrentTimeCounter getFailedSendingTimeCounter()
  {
    return emittingThread.failedSendingTimeCounter;
  }

  /** Test hook: blocks (up to 10 seconds) until the batch with the given number is finalized. */
  @VisibleForTesting
  void waitForEmission(int batchNumber) throws Exception
  {
    emittedBatchCounter.awaitCount(batchNumber, 10, TimeUnit.SECONDS);
  }

  @VisibleForTesting
  void joinEmitterThread() throws InterruptedException
  {
    emittingThread.join();
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.rest; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; import java.util.HashMap; import java.util.Map; import static java.util.Collections.unmodifiableMap; public enum RestStatus { /** * The client SHOULD continue with its request. This interim response is used to inform the client that the * initial part of the request has been received and has not yet been rejected by the server. The client * SHOULD continue by sending the remainder of the request or, if the request has already been completed, * ignore this response. The server MUST send a final response after the request has been completed. */ CONTINUE(100), /** * The server understands and is willing to comply with the client's request, via the Upgrade message header field * (section 14.42), for a change in the application protocol being used on this connection. The server will * switch protocols to those defined by the response's Upgrade header field immediately after the empty line * which terminates the 101 response. */ SWITCHING_PROTOCOLS(101), /** * The request has succeeded. 
The information returned with the response is dependent on the method * used in the request, for example: * <ul> * <li>GET: an entity corresponding to the requested resource is sent in the response;</li> * <li>HEAD: the entity-header fields corresponding to the requested resource are sent in the response without any message-body;</li> * <li>POST: an entity describing or containing the result of the action;</li> * <li>TRACE: an entity containing the request message as received by the end server.</li> * </ul> */ OK(200), /** * The request has been fulfilled and resulted in a new resource being created. The newly created resource can * be referenced by the URI(s) returned in the entity of the response, with the most specific URI for the * resource given by a Location header field. The response SHOULD include an entity containing a list of resource * characteristics and location(s) from which the user or user agent can choose the one most appropriate. The * entity format is specified by the media type given in the Content-Type header field. The origin server MUST * create the resource before returning the 201 status code. If the action cannot be carried out immediately, the * server SHOULD respond with 202 (Accepted) response instead. * <p> * A 201 response MAY contain an ETag response header field indicating the current value of the entity tag * for the requested variant just created, see section 14.19. */ CREATED(201), /** * The request has been accepted for processing, but the processing has not been completed. The request might * or might not eventually be acted upon, as it might be disallowed when processing actually takes place. There * is no facility for re-sending a status code from an asynchronous operation such as this. * <p> * The 202 response is intentionally non-committal. 
Its purpose is to allow a server to accept a request for * some other process (perhaps a batch-oriented process that is only run once per day) without requiring that * the user agent's connection to the server persist until the process is completed. The entity returned with * this response SHOULD include an indication of the request's current status and either a pointer to a status * monitor or some estimate of when the user can expect the request to be fulfilled. */ ACCEPTED(202), /** * The returned meta information in the entity-header is not the definitive set as available from the origin * server, but is gathered from a local or a third-party copy. The set presented MAY be a subset or super set * of the original version. For example, including local annotation information about the resource might * result in a super set of the meta information known by the origin server. Use of this response code * is not required and is only appropriate when the response would otherwise be 200 (OK). */ NON_AUTHORITATIVE_INFORMATION(203), /** * The server has fulfilled the request but does not need to return an entity-body, and might want to return * updated meta information. The response MAY include new or updated meta information in the form of * entity-headers, which if present SHOULD be associated with the requested variant. * <p> * If the client is a user agent, it SHOULD NOT change its document view from that which caused the request * to be sent. This response is primarily intended to allow input for actions to take place without causing a * change to the user agent's active document view, although any new or updated meta information SHOULD be * applied to the document currently in the user agent's active view. * <p> * The 204 response MUST NOT include a message-body, and thus is always terminated by the first empty * line after the header fields. 
*/ NO_CONTENT(204), /** * The server has fulfilled the request and the user agent SHOULD reset the document view which caused the * request to be sent. This response is primarily intended to allow input for actions to take place via user * input, followed by a clearing of the form in which the input is given so that the user can easily initiate * another input action. The response MUST NOT include an entity. */ RESET_CONTENT(205), /** * The server has fulfilled the partial GET request for the resource. The request MUST have included a Range * header field (section 14.35) indicating the desired range, and MAY have included an If-Range header * field (section 14.27) to make the request conditional. * <p> * The response MUST include the following header fields: * <ul> * <li>Either a Content-Range header field (section 14.16) indicating the range included with this response, * or a multipart/byteranges Content-Type including Content-Range fields for each part. If a Content-Length * header field is present in the response, its value MUST match the actual number of OCTETs transmitted in * the message-body.</li> * <li>Date</li> * <li>ETag and/or Content-Location, if the header would have been sent in a 200 response to the same request</li> * <li>Expires, Cache-Control, and/or Vary, if the field-value might differ from that sent in any previous * response for the same variant</li> * </ul> * <p> * If the 206 response is the result of an If-Range request that used a strong cache validator * (see section 13.3.3), the response SHOULD NOT include other entity-headers. If the response is the result * of an If-Range request that used a weak validator, the response MUST NOT include other entity-headers; * this prevents inconsistencies between cached entity-bodies and updated headers. Otherwise, the response MUST * include all of the entity-headers that would have been returned with a 200 (OK) response to the same request. 
* <p> * A cache MUST NOT combine a 206 response with other previously cached content if the ETag or Last-Modified * headers do not match exactly, see 13.5.4. * <p> * A cache that does not support the Range and Content-Range headers MUST NOT cache 206 (Partial) responses. */ PARTIAL_CONTENT(206), /** * The 207 (Multi-Status) status code provides status for multiple independent operations (see Section 13 for * more information). * <p> * A Multi-Status response conveys information about multiple resources in situations where multiple status * codes might be appropriate. The default Multi-Status response body is a text/xml or application/xml HTTP * entity with a 'multistatus' root element. Further elements contain 200, 300, 400, and 500 series status codes * generated during the method invocation. 100 series status codes SHOULD NOT be recorded in a 'response' * XML element. * <p> * Although '207' is used as the overall response status code, the recipient needs to consult the contents * of the multistatus response body for further information about the success or failure of the method execution. * The response MAY be used in success, partial success and also in failure situations. * <p> * The 'multistatus' root element holds zero or more 'response' elements in any order, each with * information about an individual resource. Each 'response' element MUST have an 'href' element * to identify the resource. */ MULTI_STATUS(207), /** * The requested resource corresponds to any one of a set of representations, each with its own specific * location, and agent-driven negotiation information (section 12) is being provided so that the user (or user * agent) can select a preferred representation and redirect its request to that location. * <p> * Unless it was a HEAD request, the response SHOULD include an entity containing a list of resource * characteristics and location(s) from which the user or user agent can choose the one most appropriate. 
* The entity format is specified by the media type given in the Content-Type header field. Depending upon the * format and the capabilities of the user agent, selection of the most appropriate choice MAY be performed * automatically. However, this specification does not define any standard for such automatic selection. * <p> * If the server has a preferred choice of representation, it SHOULD include the specific URI for that * representation in the Location field; user agents MAY use the Location field value for automatic redirection. * This response is cacheable unless indicated otherwise. */ MULTIPLE_CHOICES(300), /** * The requested resource has been assigned a new permanent URI and any future references to this resource * SHOULD use one of the returned URIs. Clients with link editing capabilities ought to automatically re-link * references to the Request-URI to one or more of the new references returned by the server, where possible. * This response is cacheable unless indicated otherwise. * <p> * The new permanent URI SHOULD be given by the Location field in the response. Unless the request method * was HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s). * <p> * If the 301 status code is received in response to a request other than GET or HEAD, the user agent * MUST NOT automatically redirect the request unless it can be confirmed by the user, since this might change * the conditions under which the request was issued. */ MOVED_PERMANENTLY(301), /** * The requested resource resides temporarily under a different URI. Since the redirection might be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only * cacheable if indicated by a Cache-Control or Expires header field. * <p> * The temporary URI SHOULD be given by the Location field in the response. 
Unless the request method was * HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s). * <p> * If the 302 status code is received in response to a request other than GET or HEAD, the user agent * MUST NOT automatically redirect the request unless it can be confirmed by the user, since this might change * the conditions under which the request was issued. */ FOUND(302), /** * The response to the request can be found under a different URI and SHOULD be retrieved using a GET method on * that resource. This method exists primarily to allow the output of a POST-activated script to redirect the * user agent to a selected resource. The new URI is not a substitute reference for the originally requested * resource. The 303 response MUST NOT be cached, but the response to the second (redirected) request might be * cacheable. * <p> * The different URI SHOULD be given by the Location field in the response. Unless the request method was * HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s). */ SEE_OTHER(303), /** * If the client has performed a conditional GET request and access is allowed, but the document has not been * modified, the server SHOULD respond with this status code. The 304 response MUST NOT contain a message-body, * and thus is always terminated by the first empty line after the header fields. * <p> * The response MUST include the following header fields: * <ul> * <li>Date, unless its omission is required by section 14.18.1 * If a clockless origin server obeys these rules, and proxies and clients add their own Date to any * response received without one (as already specified by [RFC 2068], section 14.19), caches will operate * correctly. 
* </li> * <li>ETag and/or Content-Location, if the header would have been sent in a 200 response to the same request</li> * <li>Expires, Cache-Control, and/or Vary, if the field-value might differ from that sent in any previous * response for the same variant</li> * </ul> * <p> * If the conditional GET used a strong cache validator (see section 13.3.3), the response SHOULD NOT include * other entity-headers. Otherwise (i.e., the conditional GET used a weak validator), the response MUST NOT * include other entity-headers; this prevents inconsistencies between cached entity-bodies and updated headers. * <p> * If a 304 response indicates an entity not currently cached, then the cache MUST disregard the response * and repeat the request without the conditional. * <p> * If a cache uses a received 304 response to update a cache entry, the cache MUST update the entry to * reflect any new field values given in the response. */ NOT_MODIFIED(304), /** * The requested resource MUST be accessed through the proxy given by the Location field. The Location field * gives the URI of the proxy. The recipient is expected to repeat this single request via the proxy. * 305 responses MUST only be generated by origin servers. */ USE_PROXY(305), /** * The requested resource resides temporarily under a different URI. Since the redirection MAY be altered on * occasion, the client SHOULD continue to use the Request-URI for future requests. This response is only * cacheable if indicated by a Cache-Control or Expires header field. * <p> * The temporary URI SHOULD be given by the Location field in the response. Unless the request method was * HEAD, the entity of the response SHOULD contain a short hypertext note with a hyperlink to the new URI(s) , * since many pre-HTTP/1.1 user agents do not understand the 307 status. Therefore, the note SHOULD contain * the information necessary for a user to repeat the original request on the new URI. 
* <p> * If the 307 status code is received in response to a request other than GET or HEAD, the user agent MUST NOT * automatically redirect the request unless it can be confirmed by the user, since this might change the * conditions under which the request was issued. */ TEMPORARY_REDIRECT(307), /** * The request could not be understood by the server due to malformed syntax. The client SHOULD NOT repeat the * request without modifications. */ BAD_REQUEST(400), /** * The request requires user authentication. The response MUST include a WWW-Authenticate header field * (section 14.47) containing a challenge applicable to the requested resource. The client MAY repeat the request * with a suitable Authorization header field (section 14.8). If the request already included Authorization * credentials, then the 401 response indicates that authorization has been refused for those credentials. * If the 401 response contains the same challenge as the prior response, and the user agent has already attempted * authentication at least once, then the user SHOULD be presented the entity that was given in the response, * since that entity might include relevant diagnostic information. HTTP access authentication is explained in * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ UNAUTHORIZED(401), /** * This code is reserved for future use. */ PAYMENT_REQUIRED(402), /** * The server understood the request, but is refusing to fulfill it. Authorization will not help and the request * SHOULD NOT be repeated. If the request method was not HEAD and the server wishes to make public why the * request has not been fulfilled, it SHOULD describe the reason for the refusal in the entity. If the server * does not wish to make this information available to the client, the status code 404 (Not Found) can be used * instead. */ FORBIDDEN(403), /** * The server has not found anything matching the Request-URI. 
No indication is given of whether the condition * is temporary or permanent. The 410 (Gone) status code SHOULD be used if the server knows, through some * internally configurable mechanism, that an old resource is permanently unavailable and has no forwarding * address. This status code is commonly used when the server does not wish to reveal exactly why the request * has been refused, or when no other response is applicable. */ NOT_FOUND(404), /** * The method specified in the Request-Line is not allowed for the resource identified by the Request-URI. * The response MUST include an Allow header containing a list of valid methods for the requested resource. */ METHOD_NOT_ALLOWED(405), /** * The resource identified by the request is only capable of generating response entities which have content * characteristics not acceptable according to the accept headers sent in the request. * <p> * Unless it was a HEAD request, the response SHOULD include an entity containing a list of available entity * characteristics and location(s) from which the user or user agent can choose the one most appropriate. * The entity format is specified by the media type given in the Content-Type header field. Depending upon the * format and the capabilities of the user agent, selection of the most appropriate choice MAY be performed * automatically. However, this specification does not define any standard for such automatic selection. * <p> * Note: HTTP/1.1 servers are allowed to return responses which are not acceptable according to the accept * headers sent in the request. In some cases, this may even be preferable to sending a 406 response. User * agents are encouraged to inspect the headers of an incoming response to determine if it is acceptable. * <p> * If the response could be unacceptable, a user agent SHOULD temporarily stop receipt of more data and query * the user for a decision on further actions. 
*/ NOT_ACCEPTABLE(406), /** * This code is similar to 401 (Unauthorized), but indicates that the client must first authenticate itself with * the proxy. The proxy MUST return a Proxy-Authenticate header field (section 14.33) containing a challenge * applicable to the proxy for the requested resource. The client MAY repeat the request with a suitable * Proxy-Authorization header field (section 14.34). HTTP access authentication is explained in * "HTTP Authentication: Basic and Digest Access Authentication" [43]. */ PROXY_AUTHENTICATION(407), /** * The client did not produce a request within the time that the server was prepared to wait. The client MAY * repeat the request without modifications at any later time. */ REQUEST_TIMEOUT(408), /** * The request could not be completed due to a conflict with the current state of the resource. This code is * only allowed in situations where it is expected that the user might be able to resolve the conflict and * resubmit the request. The response body SHOULD include enough information for the user to recognize the * source of the conflict. Ideally, the response entity would include enough information for the user or user * agent to fix the problem; however, that might not be possible and is not required. * <p> * Conflicts are most likely to occur in response to a PUT request. For example, if versioning were being * used and the entity being PUT included changes to a resource which conflict with those made by an earlier * (third-party) request, the server might use the 409 response to indicate that it can't complete the request. * In this case, the response entity would likely contain a list of the differences between the two versions in * a format defined by the response Content-Type. */ CONFLICT(409), /** * The requested resource is no longer available at the server and no forwarding address is known. This condition * is expected to be considered permanent. 
Clients with link editing capabilities SHOULD delete references to * the Request-URI after user approval. If the server does not know, or has no facility to determine, whether or * not the condition is permanent, the status code 404 (Not Found) SHOULD be used instead. This response is * cacheable unless indicated otherwise. * <p> * The 410 response is primarily intended to assist the task of web maintenance by notifying the recipient * that the resource is intentionally unavailable and that the server owners desire that remote links to that * resource be removed. Such an event is common for limited-time, promotional services and for resources belonging * to individuals no longer working at the server's site. It is not necessary to mark all permanently unavailable * resources as "gone" or to keep the mark for any length of time -- that is left to the discretion of the server * owner. */ GONE(410), /** * The server refuses to accept the request without a defined Content-Length. The client MAY repeat the request * if it adds a valid Content-Length header field containing the length of the message-body in the request message. */ LENGTH_REQUIRED(411), /** * The precondition given in one or more of the request-header fields evaluated to false when it was tested on * the server. This response code allows the client to place preconditions on the current resource metainformation * (header field data) and thus prevent the requested method from being applied to a resource other than the one * intended. */ PRECONDITION_FAILED(412), /** * The server is refusing to process a request because the request entity is larger than the server is willing * or able to process. The server MAY close the connection to prevent the client from continuing the request. * <p> * If the condition is temporary, the server SHOULD include a Retry-After header field to indicate that it * is temporary and after what time the client MAY try again. 
*/ REQUEST_ENTITY_TOO_LARGE(413), /** * The server is refusing to service the request because the Request-URI is longer than the server is willing * to interpret. This rare condition is only likely to occur when a client has improperly converted a POST * request to a GET request with long query information, when the client has descended into a URI "black hole" * of redirection (e.g., a redirected URI prefix that points to a suffix of itself), or when the server is * under attack by a client attempting to exploit security holes present in some servers using fixed-length * buffers for reading or manipulating the Request-URI. */ REQUEST_URI_TOO_LONG(414), /** * The server is refusing to service the request because the entity of the request is in a format not supported * by the requested resource for the requested method. */ UNSUPPORTED_MEDIA_TYPE(415), /** * A server SHOULD return a response with this status code if a request included a Range request-header field * (section 14.35), and none of the range-specifier values in this field overlap the current extent of the * selected resource, and the request did not include an If-Range request-header field. (For byte-ranges, this * means that the first-byte-pos of all of the byte-range-spec values were greater than the current length of * the selected resource.) * <p> * When this status code is returned for a byte-range request, the response SHOULD include a Content-Range * entity-header field specifying the current length of the selected resource (see section 14.16). This * response MUST NOT use the multipart/byteranges content-type. */ REQUESTED_RANGE_NOT_SATISFIED(416), /** * The expectation given in an Expect request-header field (see section 14.20) could not be met by this server, * or, if the server is a proxy, the server has unambiguous evidence that the request could not be met by the * next-hop server. 
*/ EXPECTATION_FAILED(417), /** * The 422 (Unprocessable Entity) status code means the server understands the content type of the request * entity (hence a 415(Unsupported Media Type) status code is inappropriate), and the syntax of the request * entity is correct (thus a 400 (Bad Request) status code is inappropriate) but was unable to process the * contained instructions. For example, this error condition may occur if an XML request body contains * well-formed (i.e., syntactically correct), but semantically erroneous, XML instructions. */ UNPROCESSABLE_ENTITY(422), /** * The 423 (Locked) status code means the source or destination resource of a method is locked. This response * SHOULD contain an appropriate precondition or postcondition code, such as 'lock-token-submitted' or * 'no-conflicting-lock'. */ LOCKED(423), /** * The 424 (Failed Dependency) status code means that the method could not be performed on the resource because * the requested action depended on another action and that action failed. For example, if a command in a * PROPPATCH method fails, then, at minimum, the rest of the commands will also fail with 424 (Failed Dependency). */ FAILED_DEPENDENCY(424), /** * 429 Too Many Requests (RFC6585) */ TOO_MANY_REQUESTS(429), /** * The server encountered an unexpected condition which prevented it from fulfilling the request. */ INTERNAL_SERVER_ERROR(500), /** * The server does not support the functionality required to fulfill the request. This is the appropriate * response when the server does not recognize the request method and is not capable of supporting it for any * resource. */ NOT_IMPLEMENTED(501), /** * The server, while acting as a gateway or proxy, received an invalid response from the upstream server it * accessed in attempting to fulfill the request. */ BAD_GATEWAY(502), /** * The server is currently unable to handle the request due to a temporary overloading or maintenance of the * server. 
The implication is that this is a temporary condition which will be alleviated after some delay. * If known, the length of the delay MAY be indicated in a Retry-After header. If no Retry-After is given, * the client SHOULD handle the response as it would for a 500 response. */ SERVICE_UNAVAILABLE(503), /** * The server, while acting as a gateway or proxy, did not receive a timely response from the upstream server * specified by the URI (e.g. HTTP, FTP, LDAP) or some other auxiliary server (e.g. DNS) it needed to access * in attempting to complete the request. */ GATEWAY_TIMEOUT(504), /** * The server does not support, or refuses to support, the HTTP protocol version that was used in the request * message. The server is indicating that it is unable or unwilling to complete the request using the same major * version as the client, as described in section 3.1, other than with this error message. The response SHOULD * contain an entity describing why that version is not supported and what other protocols are supported by * that server. */ HTTP_VERSION_NOT_SUPPORTED(505), /** * The 507 (Insufficient Storage) status code means the method could not be performed on the resource because * the server is unable to store the representation needed to successfully complete the request. This condition * is considered to be temporary. If the request that received this status code was the result of a user action, * the request MUST NOT be repeated until it is requested by a separate user action. 
*/ INSUFFICIENT_STORAGE(507); private static final Map<Integer, RestStatus> CODE_TO_STATUS; static { RestStatus[] values = values(); Map<Integer, RestStatus> codeToStatus = new HashMap<>(values.length); for (RestStatus value : values) { codeToStatus.put(value.status, value); } CODE_TO_STATUS = unmodifiableMap(codeToStatus); } private int status; RestStatus(int status) { this.status = (short) status; } public int getStatus() { return status; } public static RestStatus readFrom(StreamInput in) throws IOException { return RestStatus.valueOf(in.readString()); } public static void writeTo(StreamOutput out, RestStatus status) throws IOException { out.writeString(status.name()); } public static RestStatus status(int successfulShards, int totalShards, ShardOperationFailedException... failures) { if (failures.length == 0) { if (successfulShards == 0 && totalShards > 0) { return RestStatus.SERVICE_UNAVAILABLE; } return RestStatus.OK; } RestStatus status = RestStatus.OK; if (successfulShards == 0 && totalShards > 0) { for (ShardOperationFailedException failure : failures) { RestStatus shardStatus = failure.status(); if (shardStatus.getStatus() >= status.getStatus()) { status = failure.status(); } } return status; } return status; } /** * Turn a status code into a {@link RestStatus}, returning null if we don't know that status. */ public static RestStatus fromCode(int code) { return CODE_TO_STATUS.get(code); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.execution.scheduler; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.primitives.Ints; import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.SettableFuture; import io.airlift.concurrent.SetThreadName; import io.airlift.stats.TimeStat; import io.airlift.units.Duration; import io.prestosql.Session; import io.prestosql.connector.CatalogName; import io.prestosql.execution.BasicStageStats; import io.prestosql.execution.NodeTaskMap; import io.prestosql.execution.QueryState; import io.prestosql.execution.QueryStateMachine; import io.prestosql.execution.RemoteTask; import io.prestosql.execution.RemoteTaskFactory; import io.prestosql.execution.SqlStageExecution; import io.prestosql.execution.StageId; import io.prestosql.execution.StageInfo; import io.prestosql.execution.StageState; import io.prestosql.execution.TaskStatus; import io.prestosql.execution.buffer.OutputBuffers; import io.prestosql.execution.buffer.OutputBuffers.OutputBufferId; import io.prestosql.failuredetector.FailureDetector; import io.prestosql.metadata.InternalNode; import io.prestosql.server.DynamicFilterService; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ConnectorPartitionHandle; import 
io.prestosql.split.SplitSource; import io.prestosql.sql.planner.NodePartitionMap; import io.prestosql.sql.planner.NodePartitioningManager; import io.prestosql.sql.planner.PartitioningHandle; import io.prestosql.sql.planner.StageExecutionPlan; import io.prestosql.sql.planner.plan.PlanFragmentId; import io.prestosql.sql.planner.plan.PlanNodeId; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.function.Predicate; import java.util.function.Supplier; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Verify.verify; import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.common.collect.ImmutableMap.toImmutableMap; import static com.google.common.collect.ImmutableSet.toImmutableSet; import static com.google.common.collect.Sets.newConcurrentHashSet; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static io.airlift.concurrent.MoreFutures.tryGetFutureValue; import static io.airlift.concurrent.MoreFutures.whenAnyComplete; import static io.airlift.http.client.HttpUriBuilder.uriBuilderFrom; import static io.prestosql.SystemSessionProperties.getConcurrentLifespansPerNode; import static io.prestosql.SystemSessionProperties.getWriterMinSize; import static io.prestosql.connector.CatalogName.isInternalSystemConnector; import static io.prestosql.execution.BasicStageStats.aggregateBasicStageStats; import static 
io.prestosql.execution.SqlStageExecution.createSqlStageExecution; import static io.prestosql.execution.StageState.ABORTED; import static io.prestosql.execution.StageState.CANCELED; import static io.prestosql.execution.StageState.FAILED; import static io.prestosql.execution.StageState.FINISHED; import static io.prestosql.execution.StageState.FLUSHING; import static io.prestosql.execution.StageState.RUNNING; import static io.prestosql.execution.StageState.SCHEDULED; import static io.prestosql.execution.scheduler.SourcePartitionedScheduler.newSourcePartitionedSchedulerAsStageScheduler; import static io.prestosql.spi.StandardErrorCode.GENERIC_INTERNAL_ERROR; import static io.prestosql.spi.StandardErrorCode.NO_NODES_AVAILABLE; import static io.prestosql.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED; import static io.prestosql.sql.planner.SystemPartitioningHandle.FIXED_BROADCAST_DISTRIBUTION; import static io.prestosql.sql.planner.SystemPartitioningHandle.SCALED_WRITER_DISTRIBUTION; import static io.prestosql.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION; import static io.prestosql.sql.planner.plan.ExchangeNode.Type.REPLICATE; import static io.prestosql.util.Failures.checkCondition; import static java.lang.String.format; import static java.util.Objects.requireNonNull; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.function.Function.identity; import static java.util.stream.Collectors.toSet; public class SqlQueryScheduler { private final QueryStateMachine queryStateMachine; private final ExecutionPolicy executionPolicy; private final Map<StageId, SqlStageExecution> stages; private final ExecutorService executor; private final StageId rootStageId; private final Map<StageId, StageScheduler> stageSchedulers; private final Map<StageId, StageLinkage> stageLinkages; private final SplitSchedulerStats schedulerStats; private final boolean summarizeTaskInfo; private 
final DynamicFilterService dynamicFilterService;
    // Ensures the schedule() task is submitted to the executor at most once (see start()).
    private final AtomicBoolean started = new AtomicBoolean();

    /**
     * Factory method: constructs the scheduler and then runs initialize(), which registers
     * the stage/query state-change listeners. Construction and initialization are split so
     * that the {@code this} reference does not escape to listeners before the constructor
     * has finished.
     */
    public static SqlQueryScheduler createSqlQueryScheduler(
            QueryStateMachine queryStateMachine,
            StageExecutionPlan plan,
            NodePartitioningManager nodePartitioningManager,
            NodeScheduler nodeScheduler,
            RemoteTaskFactory remoteTaskFactory,
            Session session,
            boolean summarizeTaskInfo,
            int splitBatchSize,
            ExecutorService queryExecutor,
            ScheduledExecutorService schedulerExecutor,
            FailureDetector failureDetector,
            OutputBuffers rootOutputBuffers,
            NodeTaskMap nodeTaskMap,
            ExecutionPolicy executionPolicy,
            SplitSchedulerStats schedulerStats,
            DynamicFilterService dynamicFilterService)
    {
        SqlQueryScheduler sqlQueryScheduler = new SqlQueryScheduler(
                queryStateMachine,
                plan,
                nodePartitioningManager,
                nodeScheduler,
                remoteTaskFactory,
                session,
                summarizeTaskInfo,
                splitBatchSize,
                queryExecutor,
                schedulerExecutor,
                failureDetector,
                rootOutputBuffers,
                nodeTaskMap,
                executionPolicy,
                schedulerStats,
                dynamicFilterService);
        sqlQueryScheduler.initialize();
        return sqlQueryScheduler;
    }

    private SqlQueryScheduler(
            QueryStateMachine queryStateMachine,
            StageExecutionPlan plan,
            NodePartitioningManager nodePartitioningManager,
            NodeScheduler nodeScheduler,
            RemoteTaskFactory remoteTaskFactory,
            Session session,
            boolean summarizeTaskInfo,
            int splitBatchSize,
            ExecutorService queryExecutor,
            ScheduledExecutorService schedulerExecutor,
            FailureDetector failureDetector,
            OutputBuffers rootOutputBuffers,
            NodeTaskMap nodeTaskMap,
            ExecutionPolicy executionPolicy,
            SplitSchedulerStats schedulerStats,
            DynamicFilterService dynamicFilterService)
    {
        this.queryStateMachine = requireNonNull(queryStateMachine, "queryStateMachine is null");
        // NOTE(review): the message string says "schedulerPolicyFactory" for the
        // executionPolicy field — likely a stale copy-paste, left as-is here.
        this.executionPolicy = requireNonNull(executionPolicy, "schedulerPolicyFactory is null");
        this.schedulerStats = requireNonNull(schedulerStats, "schedulerStats is null");
        this.summarizeTaskInfo = summarizeTaskInfo;
        this.dynamicFilterService = requireNonNull(dynamicFilterService,
"dynamicFilterService is null"); // todo come up with a better way to build this, or eliminate this map ImmutableMap.Builder<StageId, StageScheduler> stageSchedulers = ImmutableMap.builder(); ImmutableMap.Builder<StageId, StageLinkage> stageLinkages = ImmutableMap.builder(); // Only fetch a distribution once per query to assure all stages see the same machine assignments Map<PartitioningHandle, NodePartitionMap> partitioningCache = new HashMap<>(); OutputBufferId rootBufferId = Iterables.getOnlyElement(rootOutputBuffers.getBuffers().keySet()); List<SqlStageExecution> stages = createStages( (fragmentId, tasks, noMoreExchangeLocations) -> updateQueryOutputLocations(queryStateMachine, rootBufferId, tasks, noMoreExchangeLocations), new AtomicInteger(), plan.withBucketToPartition(Optional.of(new int[1])), nodeScheduler, remoteTaskFactory, session, splitBatchSize, partitioningHandle -> partitioningCache.computeIfAbsent(partitioningHandle, handle -> nodePartitioningManager.getNodePartitioningMap(session, handle)), nodePartitioningManager, queryExecutor, schedulerExecutor, failureDetector, nodeTaskMap, stageSchedulers, stageLinkages); SqlStageExecution rootStage = stages.get(0); rootStage.setOutputBuffers(rootOutputBuffers); this.rootStageId = rootStage.getStageId(); this.stages = stages.stream() .collect(toImmutableMap(SqlStageExecution::getStageId, identity())); this.stageSchedulers = stageSchedulers.build(); this.stageLinkages = stageLinkages.build(); this.executor = queryExecutor; } // this is a separate method to ensure that the `this` reference is not leaked during construction private void initialize() { SqlStageExecution rootStage = stages.get(rootStageId); rootStage.addStateChangeListener(state -> { if (state == FINISHED) { queryStateMachine.transitionToFinishing(); } else if (state == CANCELED) { // output stage was canceled queryStateMachine.transitionToCanceled(); } }); for (SqlStageExecution stage : stages.values()) { stage.addStateChangeListener(state -> { if 
(queryStateMachine.isDone()) { return; } if (state == FAILED) { queryStateMachine.transitionToFailed(stage.getStageInfo().getFailureCause().toException()); } else if (state == ABORTED) { // this should never happen, since abort can only be triggered in query clean up after the query is finished queryStateMachine.transitionToFailed(new PrestoException(GENERIC_INTERNAL_ERROR, "Query stage was aborted")); } else if (queryStateMachine.getQueryState() == QueryState.STARTING) { // if the stage has at least one task, we are running if (stage.hasTasks()) { queryStateMachine.transitionToRunning(); } } }); } // when query is done or any time a stage completes, attempt to transition query to "final query info ready" queryStateMachine.addStateChangeListener(newState -> { if (newState.isDone()) { queryStateMachine.updateQueryInfo(Optional.ofNullable(getStageInfo())); } }); for (SqlStageExecution stage : stages.values()) { stage.addFinalStageInfoListener(status -> queryStateMachine.updateQueryInfo(Optional.ofNullable(getStageInfo()))); } } private static void updateQueryOutputLocations(QueryStateMachine queryStateMachine, OutputBufferId rootBufferId, Set<RemoteTask> tasks, boolean noMoreExchangeLocations) { Set<URI> bufferLocations = tasks.stream() .map(task -> task.getTaskStatus().getSelf()) .map(location -> uriBuilderFrom(location).appendPath("results").appendPath(rootBufferId.toString()).build()) .collect(toImmutableSet()); queryStateMachine.updateOutputLocations(bufferLocations, noMoreExchangeLocations); } private List<SqlStageExecution> createStages( ExchangeLocationsConsumer parent, AtomicInteger nextStageId, StageExecutionPlan plan, NodeScheduler nodeScheduler, RemoteTaskFactory remoteTaskFactory, Session session, int splitBatchSize, Function<PartitioningHandle, NodePartitionMap> partitioningCache, NodePartitioningManager nodePartitioningManager, ExecutorService queryExecutor, ScheduledExecutorService schedulerExecutor, FailureDetector failureDetector, NodeTaskMap 
nodeTaskMap, ImmutableMap.Builder<StageId, StageScheduler> stageSchedulers, ImmutableMap.Builder<StageId, StageLinkage> stageLinkages) { ImmutableList.Builder<SqlStageExecution> stages = ImmutableList.builder(); StageId stageId = new StageId(queryStateMachine.getQueryId(), nextStageId.getAndIncrement()); SqlStageExecution stage = createSqlStageExecution( stageId, plan.getFragment(), plan.getTables(), remoteTaskFactory, session, summarizeTaskInfo, nodeTaskMap, queryExecutor, failureDetector, dynamicFilterService, schedulerStats); stages.add(stage); // function to create child stages recursively by supplying the bucket partitioning (according to parent's partitioning) Function<Optional<int[]>, Set<SqlStageExecution>> createChildStages = bucketToPartition -> { ImmutableSet.Builder<SqlStageExecution> childStagesBuilder = ImmutableSet.builder(); for (StageExecutionPlan subStagePlan : plan.getSubStages()) { List<SqlStageExecution> subTree = createStages( stage::addExchangeLocations, nextStageId, subStagePlan.withBucketToPartition(bucketToPartition), nodeScheduler, remoteTaskFactory, session, splitBatchSize, partitioningCache, nodePartitioningManager, queryExecutor, schedulerExecutor, failureDetector, nodeTaskMap, stageSchedulers, stageLinkages); stages.addAll(subTree); SqlStageExecution childStage = subTree.get(0); childStagesBuilder.add(childStage); } return childStagesBuilder.build(); }; Set<SqlStageExecution> childStages; PartitioningHandle partitioningHandle = plan.getFragment().getPartitioning(); if (partitioningHandle.equals(SOURCE_DISTRIBUTION)) { // nodes are selected dynamically based on the constraints of the splits and the system load Entry<PlanNodeId, SplitSource> entry = Iterables.getOnlyElement(plan.getSplitSources().entrySet()); PlanNodeId planNodeId = entry.getKey(); SplitSource splitSource = entry.getValue(); Optional<CatalogName> catalogName = Optional.of(splitSource.getCatalogName()) .filter(catalog -> !isInternalSystemConnector(catalog)); NodeSelector 
nodeSelector = nodeScheduler.createNodeSelector(catalogName); SplitPlacementPolicy placementPolicy = new DynamicSplitPlacementPolicy(nodeSelector, stage::getAllTasks); checkArgument(!plan.getFragment().getStageExecutionDescriptor().isStageGroupedExecution()); childStages = createChildStages.apply(Optional.of(new int[1])); stageSchedulers.put(stageId, newSourcePartitionedSchedulerAsStageScheduler( stage, planNodeId, splitSource, placementPolicy, splitBatchSize, dynamicFilterService, () -> childStages.stream().anyMatch(SqlStageExecution::isAnyTaskBlocked))); } else if (partitioningHandle.equals(SCALED_WRITER_DISTRIBUTION)) { childStages = createChildStages.apply(Optional.of(new int[1])); Supplier<Collection<TaskStatus>> sourceTasksProvider = () -> childStages.stream() .map(SqlStageExecution::getTaskStatuses) .flatMap(List::stream) .collect(toImmutableList()); Supplier<Collection<TaskStatus>> writerTasksProvider = stage::getTaskStatuses; ScaledWriterScheduler scheduler = new ScaledWriterScheduler( stage, sourceTasksProvider, writerTasksProvider, nodeScheduler.createNodeSelector(Optional.empty()), schedulerExecutor, getWriterMinSize(session)); whenAllStages(childStages, StageState::isDone) .addListener(scheduler::finish, directExecutor()); stageSchedulers.put(stageId, scheduler); } else { Optional<int[]> bucketToPartition; Map<PlanNodeId, SplitSource> splitSources = plan.getSplitSources(); if (!splitSources.isEmpty()) { // contains local source List<PlanNodeId> schedulingOrder = plan.getFragment().getPartitionedSources(); Optional<CatalogName> catalogName = partitioningHandle.getConnectorId(); catalogName.orElseThrow(() -> new IllegalArgumentException("No connector ID for partitioning handle: " + partitioningHandle)); List<ConnectorPartitionHandle> connectorPartitionHandles; boolean groupedExecutionForStage = plan.getFragment().getStageExecutionDescriptor().isStageGroupedExecution(); if (groupedExecutionForStage) { connectorPartitionHandles = 
nodePartitioningManager.listPartitionHandles(session, partitioningHandle); checkState(!ImmutableList.of(NOT_PARTITIONED).equals(connectorPartitionHandles)); } else { connectorPartitionHandles = ImmutableList.of(NOT_PARTITIONED); } BucketNodeMap bucketNodeMap; List<InternalNode> stageNodeList; if (plan.getFragment().getRemoteSourceNodes().stream().allMatch(node -> node.getExchangeType() == REPLICATE)) { // no remote source boolean dynamicLifespanSchedule = plan.getFragment().getStageExecutionDescriptor().isDynamicLifespanSchedule(); bucketNodeMap = nodePartitioningManager.getBucketNodeMap(session, partitioningHandle, dynamicLifespanSchedule); // verify execution is consistent with planner's decision on dynamic lifespan schedule verify(bucketNodeMap.isDynamic() == dynamicLifespanSchedule); stageNodeList = new ArrayList<>(nodeScheduler.createNodeSelector(catalogName).allNodes()); Collections.shuffle(stageNodeList); bucketToPartition = Optional.empty(); } else { // cannot use dynamic lifespan schedule verify(!plan.getFragment().getStageExecutionDescriptor().isDynamicLifespanSchedule()); // remote source requires nodePartitionMap NodePartitionMap nodePartitionMap = partitioningCache.apply(plan.getFragment().getPartitioning()); if (groupedExecutionForStage) { checkState(connectorPartitionHandles.size() == nodePartitionMap.getBucketToPartition().length); } stageNodeList = nodePartitionMap.getPartitionToNode(); bucketNodeMap = nodePartitionMap.asBucketNodeMap(); bucketToPartition = Optional.of(nodePartitionMap.getBucketToPartition()); } stageSchedulers.put(stageId, new FixedSourcePartitionedScheduler( stage, splitSources, plan.getFragment().getStageExecutionDescriptor(), schedulingOrder, stageNodeList, bucketNodeMap, splitBatchSize, getConcurrentLifespansPerNode(session), nodeScheduler.createNodeSelector(catalogName), connectorPartitionHandles, dynamicFilterService)); } else { // all sources are remote NodePartitionMap nodePartitionMap = 
partitioningCache.apply(plan.getFragment().getPartitioning());
                List<InternalNode> partitionToNode = nodePartitionMap.getPartitionToNode();
                // todo this should asynchronously wait a standard timeout period before failing
                checkCondition(!partitionToNode.isEmpty(), NO_NODES_AVAILABLE, "No worker nodes available");
                stageSchedulers.put(stageId, new FixedCountScheduler(stage, partitionToNode));
                bucketToPartition = Optional.of(nodePartitionMap.getBucketToPartition());
            }

            childStages = createChildStages.apply(bucketToPartition);
        }

        // Cancel any still-running children once this stage starts flushing or finishes.
        stage.addStateChangeListener(newState -> {
            if (newState == FLUSHING || newState.isDone()) {
                childStages.forEach(SqlStageExecution::cancel);
            }
        });

        stageLinkages.put(stageId, new StageLinkage(plan.getFragment().getId(), parent, childStages));

        return stages.build();
    }

    /**
     * Aggregates the basic (lightweight) statistics across all stages of this query.
     */
    public BasicStageStats getBasicStageStats()
    {
        List<BasicStageStats> stageStats = stages.values().stream()
                .map(SqlStageExecution::getBasicStageStats)
                .collect(toImmutableList());

        return aggregateBasicStageStats(stageStats);
    }

    /**
     * Builds the full {@link StageInfo} tree rooted at the root stage.
     */
    public StageInfo getStageInfo()
    {
        Map<StageId, StageInfo> stageInfos = stages.values().stream()
                .map(SqlStageExecution::getStageInfo)
                .collect(toImmutableMap(StageInfo::getStageId, identity()));

        return buildStageInfo(rootStageId, stageInfos);
    }

    /**
     * Recursively assembles the StageInfo tree for {@code stageId} from the flat
     * per-stage snapshot in {@code stageInfos}.
     */
    private StageInfo buildStageInfo(StageId stageId, Map<StageId, StageInfo> stageInfos)
    {
        StageInfo parent = stageInfos.get(stageId);
        // BUG FIX: the message previously formatted "parent", which is always null when
        // this check fails; format the missing stage's id instead.
        checkArgument(parent != null, "No stageInfo for %s", stageId);
        List<StageInfo> childStages = stageLinkages.get(stageId).getChildStageIds().stream()
                .map(childStageId -> buildStageInfo(childStageId, stageInfos))
                .collect(toImmutableList());
        if (childStages.isEmpty()) {
            return parent;
        }
        return new StageInfo(
                parent.getStageId(),
                parent.getState(),
                parent.getPlan(),
                parent.getTypes(),
                parent.getStageStats(),
                parent.getTasks(),
                childStages,
                parent.getTables(),
                parent.getFailureCause());
    }

    /** Total user memory currently reserved across all stages, in bytes. */
    public long getUserMemoryReservation()
    {
        return stages.values().stream()
                .mapToLong(SqlStageExecution::getUserMemoryReservation)
                .sum();
    }

    /** Total (user + system) memory currently reserved across all stages, in bytes. */
    public long getTotalMemoryReservation()
    {
        return stages.values().stream()
                .mapToLong(SqlStageExecution::getTotalMemoryReservation)
                .sum();
    }

    /** Total CPU time consumed by all stages of this query. */
    public Duration getTotalCpuTime()
    {
        long millis = stages.values().stream()
                .mapToLong(stage -> stage.getTotalCpuTime().toMillis())
                .sum();
        return new Duration(millis, MILLISECONDS);
    }

    /**
     * Starts scheduling on the scheduler executor. Idempotent: only the first call
     * submits the scheduling loop.
     */
    public void start()
    {
        if (started.compareAndSet(false, true)) {
            executor.submit(this::schedule);
        }
    }

    /**
     * Drives scheduling for all stages until the execution schedule reports completion.
     * Any failure transitions the query to FAILED; schedulers are always closed on exit.
     */
    private void schedule()
    {
        try (SetThreadName ignored = new SetThreadName("Query-%s", queryStateMachine.getQueryId())) {
            Set<StageId> completedStages = new HashSet<>();
            ExecutionSchedule executionSchedule = executionPolicy.createExecutionSchedule(stages.values());
            while (!executionSchedule.isFinished()) {
                List<ListenableFuture<?>> blockedStages = new ArrayList<>();
                for (SqlStageExecution stage : executionSchedule.getStagesToSchedule()) {
                    stage.beginScheduling();

                    // perform some scheduling work
                    ScheduleResult result = stageSchedulers.get(stage.getStageId())
                            .schedule();

                    // modify parent and children based on the results of the scheduling
                    if (result.isFinished()) {
                        stage.schedulingComplete();
                    }
                    else if (!result.getBlocked().isDone()) {
                        blockedStages.add(result.getBlocked());
                    }
                    stageLinkages.get(stage.getStageId())
                            .processScheduleResults(stage.getState(), result.getNewTasks());
                    schedulerStats.getSplitsScheduledPerIteration().add(result.getSplitsScheduled());
                    if (result.getBlockedReason().isPresent()) {
                        switch (result.getBlockedReason().get()) {
                            case WRITER_SCALING:
                                // no-op
                                break;
                            case WAITING_FOR_SOURCE:
                                schedulerStats.getWaitingForSource().update(1);
                                break;
                            case SPLIT_QUEUES_FULL:
                                schedulerStats.getSplitQueuesFull().update(1);
                                break;
                            case MIXED_SPLIT_QUEUES_FULL_AND_WAITING_FOR_SOURCE:
                            case NO_ACTIVE_DRIVER_GROUP:
                                break;
                            default:
                                throw new UnsupportedOperationException("Unknown blocked reason: " + result.getBlockedReason().get());
                        }
                    }
                }

                // make sure to update stage linkage at least once per loop to catch async state changes (e.g., partial cancel)
                for (SqlStageExecution stage : stages.values()) {
                    if (!completedStages.contains(stage.getStageId()) && stage.getState().isDone()) {
                        stageLinkages.get(stage.getStageId())
                                .processScheduleResults(stage.getState(), ImmutableSet.of());
                        completedStages.add(stage.getStageId());
                    }
                }

                // wait for a state change and then schedule again
                if (!blockedStages.isEmpty()) {
                    try (TimeStat.BlockTimer timer = schedulerStats.getSleepTime().time()) {
                        tryGetFutureValue(whenAnyComplete(blockedStages), 1, SECONDS);
                    }
                    for (ListenableFuture<?> blockedStage : blockedStages) {
                        blockedStage.cancel(true);
                    }
                }
            }

            for (SqlStageExecution stage : stages.values()) {
                StageState state = stage.getState();
                if (state != SCHEDULED && state != RUNNING && state != FLUSHING && !state.isDone()) {
                    throw new PrestoException(GENERIC_INTERNAL_ERROR, format("Scheduling is complete, but stage %s is in state %s", stage.getStageId(), state));
                }
            }
        }
        catch (Throwable t) {
            queryStateMachine.transitionToFailed(t);
            throw t;
        }
        finally {
            RuntimeException closeError = new RuntimeException();
            for (StageScheduler scheduler : stageSchedulers.values()) {
                try {
                    scheduler.close();
                }
                catch (Throwable t) {
                    queryStateMachine.transitionToFailed(t);
                    // Self-suppression not permitted
                    if (closeError != t) {
                        closeError.addSuppressed(t);
                    }
                }
            }
            if (closeError.getSuppressed().length > 0) {
                throw closeError;
            }
        }
    }

    /**
     * Cancels a single stage by id.
     *
     * @throws NullPointerException if no such stage exists
     */
    public void cancelStage(StageId stageId)
    {
        try (SetThreadName ignored = new SetThreadName("Query-%s", queryStateMachine.getQueryId())) {
            SqlStageExecution sqlStageExecution = stages.get(stageId);
            SqlStageExecution stage = requireNonNull(sqlStageExecution, () -> format("Stage '%s' does not exist", stageId));
            stage.cancel();
        }
    }

    /** Aborts every stage of the query. */
    public void abort()
    {
        try (SetThreadName ignored = new SetThreadName("Query-%s", queryStateMachine.getQueryId())) {
            stages.values().forEach(SqlStageExecution::abort);
        }
    }

    /**
     * Returns a future that completes once every given stage has reached a state
     * accepted by {@code predicate} at least once.
     */
    private static ListenableFuture<?> whenAllStages(Collection<SqlStageExecution> stages, Predicate<StageState> predicate)
    {
        checkArgument(!stages.isEmpty(), "stages is empty");
        Set<StageId> stageIds = newConcurrentHashSet(stages.stream()
                .map(SqlStageExecution::getStageId)
                .collect(toSet()));
        SettableFuture<?> future = SettableFuture.create();

        for (SqlStageExecution stage : stages) {
            stage.addStateChangeListener(state -> {
                if (predicate.test(state) && stageIds.remove(stage.getStageId()) && stageIds.isEmpty()) {
                    future.set(null);
                }
            });
        }

        return future;
    }

    private interface ExchangeLocationsConsumer
    {
        void addExchangeLocations(PlanFragmentId fragmentId, Set<RemoteTask> tasks, boolean noMoreExchangeLocations);
    }

    /**
     * Wires a stage to its parent (exchange locations) and children (output buffers),
     * so new tasks are propagated in both directions as scheduling progresses.
     */
    private static class StageLinkage
    {
        private final PlanFragmentId currentStageFragmentId;
        private final ExchangeLocationsConsumer parent;
        private final Set<OutputBufferManager> childOutputBufferManagers;
        private final Set<StageId> childStageIds;

        public StageLinkage(PlanFragmentId fragmentId, ExchangeLocationsConsumer parent, Set<SqlStageExecution> children)
        {
            this.currentStageFragmentId = fragmentId;
            this.parent = parent;
            this.childOutputBufferManagers = children.stream()
                    .map(childStage -> {
                        PartitioningHandle partitioningHandle = childStage.getFragment().getPartitioningScheme().getPartitioning().getHandle();
                        if (partitioningHandle.equals(FIXED_BROADCAST_DISTRIBUTION)) {
                            return new BroadcastOutputBufferManager(childStage::setOutputBuffers);
                        }
                        else if (partitioningHandle.equals(SCALED_WRITER_DISTRIBUTION)) {
                            return new ScaledOutputBufferManager(childStage::setOutputBuffers);
                        }
                        else {
                            int partitionCount = Ints.max(childStage.getFragment().getPartitioningScheme().getBucketToPartition().get()) + 1;
                            return new PartitionedOutputBufferManager(partitioningHandle, partitionCount, childStage::setOutputBuffers);
                        }
                    })
                    .collect(toImmutableSet());

            this.childStageIds = children.stream()
                    .map(SqlStageExecution::getStageId)
                    .collect(toImmutableSet());
        }

        public Set<StageId> getChildStageIds()
        {
            return childStageIds;
        }

        public void processScheduleResults(StageState newState, Set<RemoteTask> newTasks)
        {
            boolean noMoreTasks = !newState.canScheduleMoreTasks();

            // Add an exchange location to the parent stage for each new task
            parent.addExchangeLocations(currentStageFragmentId, newTasks, noMoreTasks);

            if (!childOutputBufferManagers.isEmpty()) {
                // Add an output buffer to the child stages for each new task
                List<OutputBufferId> newOutputBuffers = newTasks.stream()
                        .map(task -> new OutputBufferId(task.getTaskId().getId()))
                        .collect(toImmutableList());
                for (OutputBufferManager child : childOutputBufferManagers) {
                    child.addOutputBuffers(newOutputBuffers, noMoreTasks);
                }
            }
        }
    }
}
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http2;

import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http.DefaultHttpHeaders;
import io.netty.handler.codec.http.FullHttpMessage;
import io.netty.handler.codec.http.HttpHeaders;
import io.netty.util.AsciiString;
import io.netty.util.internal.UnstableApi;

import java.util.Iterator;
import java.util.Map.Entry;

import static io.netty.handler.codec.http2.Http2Error.PROTOCOL_ERROR;
import static io.netty.handler.codec.http2.Http2Exception.connectionError;

/**
 * Translate header/data/priority HTTP/2 frame events into HTTP events. Just as {@link InboundHttp2ToHttpAdapter}
 * may generate multiple {@link FullHttpMessage} objects per stream, this class is more likely to
 * generate multiple messages per stream because the chances of an HTTP/2 event happening outside
 * the header/data message flow is more likely.
 */
@UnstableApi
public final class InboundHttp2ToHttpPriorityAdapter extends InboundHttp2ToHttpAdapter {
    private static final AsciiString OUT_OF_MESSAGE_SEQUENCE_METHOD = new AsciiString(
            HttpConversionUtil.OUT_OF_MESSAGE_SEQUENCE_METHOD.toString());
    private static final AsciiString OUT_OF_MESSAGE_SEQUENCE_PATH = new AsciiString(
            HttpConversionUtil.OUT_OF_MESSAGE_SEQUENCE_PATH);
    private static final AsciiString OUT_OF_MESSAGE_SEQUENCE_RETURN_CODE = new AsciiString(
            HttpConversionUtil.OUT_OF_MESSAGE_SEQUENCE_RETURN_CODE.toString());

    // Per-stream storage for headers observed outside the normal HTTP message flow.
    private final Http2Connection.PropertyKey outOfMessageFlowHeadersKey;

    InboundHttp2ToHttpPriorityAdapter(Http2Connection connection, int maxContentLength,
                                      boolean validateHttpHeaders,
                                      boolean propagateSettings) {
        super(connection, maxContentLength, validateHttpHeaders, propagateSettings);
        outOfMessageFlowHeadersKey = connection.newKey();
    }

    private HttpHeaders getOutOfMessageFlowHeaders(Http2Stream stream) {
        return stream.getProperty(outOfMessageFlowHeadersKey);
    }

    private void putOutOfMessageFlowHeaders(Http2Stream stream, HttpHeaders headers) {
        stream.setProperty(outOfMessageFlowHeadersKey, headers);
    }

    private HttpHeaders removeOutOfMessageFlowHeaders(Http2Stream stream) {
        return stream.removeProperty(outOfMessageFlowHeadersKey);
    }

    /**
     * Get either the header or the trailing headers depending on which is valid to add to
     * @param msg The message containing the headers and trailing headers
     * @return The headers object which can be appended to or modified
     */
    private static HttpHeaders getActiveHeaders(FullHttpMessage msg) {
        return msg.content().isReadable() ? msg.trailingHeaders() : msg.headers();
    }

    /**
     * This method will add the {@code headers} to the out of order headers map
     * @param stream The stream associated with {@code headers}
     * @param headers Newly encountered out of order headers which must be stored for future use
     */
    private void importOutOfMessageFlowHeaders(Http2Stream stream, HttpHeaders headers) {
        final HttpHeaders outOfMessageFlowHeader = getOutOfMessageFlowHeaders(stream);
        if (outOfMessageFlowHeader == null) {
            putOutOfMessageFlowHeaders(stream, headers);
        } else {
            outOfMessageFlowHeader.setAll(headers);
        }
    }

    /**
     * Take any saved out of order headers and export them to {@code headers}
     * @param stream The stream to search for out of order headers for
     * @param headers If any out of order headers exist for {@code stream} they will be added to this object
     */
    private void exportOutOfMessageFlowHeaders(Http2Stream stream, final HttpHeaders headers) {
        final HttpHeaders outOfMessageFlowHeader = getOutOfMessageFlowHeaders(stream);
        if (outOfMessageFlowHeader != null) {
            headers.setAll(outOfMessageFlowHeader);
        }
    }

    /**
     * This will remove all headers which are related to priority tree events
     * @param headers The headers to remove the priority tree elements from
     */
    private static void removePriorityRelatedHeaders(HttpHeaders headers) {
        headers.remove(HttpConversionUtil.ExtensionHeaderNames.STREAM_DEPENDENCY_ID.text());
        headers.remove(HttpConversionUtil.ExtensionHeaderNames.STREAM_WEIGHT.text());
    }

    /**
     * Initializes the pseudo header fields for out of message flow HTTP/2 headers
     * @param headers The headers to be initialized with pseudo header values
     */
    private void initializePseudoHeaders(Http2Headers headers) {
        if (connection.isServer()) {
            headers.method(OUT_OF_MESSAGE_SEQUENCE_METHOD).path(OUT_OF_MESSAGE_SEQUENCE_PATH);
        } else {
            headers.status(OUT_OF_MESSAGE_SEQUENCE_RETURN_CODE);
        }
    }

    /**
     * Add all the HTTP headers into the HTTP/2 headers object
     * @param httpHeaders The HTTP headers to translate to HTTP/2
     * @param http2Headers The target HTTP/2 headers
     */
    private static void addHttpHeadersToHttp2Headers(HttpHeaders httpHeaders, final Http2Headers http2Headers) {
        Iterator<Entry<CharSequence, CharSequence>> iter = httpHeaders.iteratorCharSequence();
        while (iter.hasNext()) {
            Entry<CharSequence, CharSequence> entry = iter.next();
            http2Headers.add(AsciiString.of(entry.getKey()), AsciiString.of(entry.getValue()));
        }
    }

    @Override
    protected void fireChannelRead(ChannelHandlerContext ctx, FullHttpMessage msg, boolean release,
                                   Http2Stream stream) {
        // Merge any headers collected outside the message flow before handing the message up.
        exportOutOfMessageFlowHeaders(stream, getActiveHeaders(msg));
        super.fireChannelRead(ctx, msg, release, stream);
    }

    @Override
    protected FullHttpMessage processHeadersBegin(ChannelHandlerContext ctx, Http2Stream stream, Http2Headers headers,
                                                  boolean endOfStream, boolean allowAppend, boolean appendToTrailer)
            throws Http2Exception {
        FullHttpMessage msg = super.processHeadersBegin(ctx, stream, headers, endOfStream, allowAppend,
                appendToTrailer);
        if (msg != null) {
            exportOutOfMessageFlowHeaders(stream, getActiveHeaders(msg));
        }
        return msg;
    }

    @Override
    public void onPriorityTreeParentChanged(Http2Stream stream, Http2Stream oldParent) {
        Http2Stream parent = stream.parent();
        FullHttpMessage msg = getMessage(stream);
        if (msg == null) {
            // msg may be null if a HTTP/2 frame event is received outside the HTTP message flow
            // For example a PRIORITY frame can be received in any state but the HTTP message flow
            // takes place while the stream is OPEN.
            if (parent != null && !parent.equals(connection.connectionStream())) {
                HttpHeaders headers = new DefaultHttpHeaders();
                headers.setInt(HttpConversionUtil.ExtensionHeaderNames.STREAM_DEPENDENCY_ID.text(), parent.id());
                importOutOfMessageFlowHeaders(stream, headers);
            }
        } else {
            if (parent == null) {
                removePriorityRelatedHeaders(msg.headers());
                removePriorityRelatedHeaders(msg.trailingHeaders());
            } else if (!parent.equals(connection.connectionStream())) {
                HttpHeaders headers = getActiveHeaders(msg);
                headers.setInt(HttpConversionUtil.ExtensionHeaderNames.STREAM_DEPENDENCY_ID.text(), parent.id());
            }
        }
    }

    @Override
    public void onWeightChanged(Http2Stream stream, short oldWeight) {
        FullHttpMessage msg = getMessage(stream);
        final HttpHeaders headers;
        if (msg == null) {
            // msg may be null if a HTTP/2 frame event is received outside the HTTP message flow
            // For example a PRIORITY frame can be received in any state
            // and the HTTP message flow exists in OPEN.
            headers = new DefaultHttpHeaders();
            importOutOfMessageFlowHeaders(stream, headers);
        } else {
            headers = getActiveHeaders(msg);
        }
        headers.setShort(HttpConversionUtil.ExtensionHeaderNames.STREAM_WEIGHT.text(), stream.weight());
    }

    @Override
    public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency, short weight,
                               boolean exclusive) throws Http2Exception {
        Http2Stream stream = connection.stream(streamId);
        if (stream == null) {
            return;
        }
        FullHttpMessage msg = getMessage(stream);
        if (msg == null) {
            HttpHeaders httpHeaders = removeOutOfMessageFlowHeaders(stream);
            if (httpHeaders == null) {
                // BUG FIX: corrected misspelled "recieved" in the error message.
                throw connectionError(PROTOCOL_ERROR, "Priority Frame received for unknown stream id %d", streamId);
            }

            Http2Headers http2Headers = new DefaultHttp2Headers(validateHttpHeaders, httpHeaders.size());
            initializePseudoHeaders(http2Headers);
            addHttpHeadersToHttp2Headers(httpHeaders, http2Headers);
            msg = newMessage(stream, http2Headers, validateHttpHeaders, ctx.alloc());
            fireChannelRead(ctx, msg, false, stream);
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.waveprotocol.box.server.persistence.file;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.nio.channels.Channels;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;

import org.waveprotocol.box.common.Receiver;
import org.waveprotocol.box.server.persistence.PersistenceException;
import org.waveprotocol.box.server.persistence.protos.ProtoDeltaStoreData.ProtoTransformedWaveletDelta;
import org.waveprotocol.box.server.persistence.protos.ProtoDeltaStoreDataSerializer;
import org.waveprotocol.box.server.shutdown.LifeCycle;
import org.waveprotocol.box.server.shutdown.ShutdownPriority;
import org.waveprotocol.box.server.shutdown.Shutdownable;
import org.waveprotocol.box.server.swell.WaveletContributions;
import org.waveprotocol.box.server.waveserver.AppliedDeltaUtil;
import org.waveprotocol.box.server.waveserver.ByteStringMessage;
import org.waveprotocol.box.server.waveserver.DeltaStore.DeltasAccess;
import org.waveprotocol.box.server.waveserver.DeltaStore.Snapshot;
import org.waveprotocol.box.server.waveserver.WaveletDeltaRecord;
import org.waveprotocol.wave.federation.Proto.ProtocolAppliedWaveletDelta;
import org.waveprotocol.wave.model.id.WaveletName;
import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta;
import org.waveprotocol.wave.model.util.Pair;
import org.waveprotocol.wave.model.version.HashedVersion;
import org.waveprotocol.wave.model.wave.data.WaveletData;
import org.waveprotocol.wave.util.logging.Log;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;

/**
 * A flat file based implementation of DeltasAccess. This class provides a storage backend for the
 * deltas in a single wavelet.
 *
 * The file starts with a header. The header contains the version of the file protocol. After the
 * version, the file contains a sequence of delta records. Each record contains a header followed
 * by a WaveletDeltaRecord.
 *
 * A particular FileDeltaCollection instance assumes that it's <em>the only one</em> reading and
 * writing a particular wavelet. The methods are <em>not</em> multithread-safe.
 *
 * See this document for design specifics:
 * https://sites.google.com/a/waveprotocol.org/wave-protocol/protocol/design-proposals/wave-store-design-for-wave-in-a-box
 *
 * @author josephg@gmail.com (Joseph Gentle)
 */
public class FileDeltaCollection implements DeltasAccess {
  public static final String DELTAS_FILE_SUFFIX = ".deltas";
  public static final String INDEX_FILE_SUFFIX = ".index";

  private static final byte[] FILE_MAGIC_BYTES = new byte[]{'W', 'A', 'V', 'E'};
  private static final int FILE_PROTOCOL_VERSION = 1;
  private static final int FILE_HEADER_LENGTH = 8;

  private static final int DELTA_PROTOCOL_VERSION = 1;

  private static final Log LOG = Log.get(FileDeltaCollection.class);

  private final WaveletName waveletName;
  private final RandomAccessFile file;
  private final DeltaIndex index;
  // Resulting version of the last stored delta, or null when the collection is empty.
  private HashedVersion endVersion;
  private boolean isOpen;

  private final LifeCycle lifeCycle = new LifeCycle(FileDeltaCollection.class.getSimpleName(),
      ShutdownPriority.Storage, new Shutdownable() {
        @Override
        public void shutdown() throws Exception {
          close();
        }
      });

  /**
   * A single record in the delta file.
   */
  private class DeltaHeader {
    /** Length in bytes of the header */
    public static final int HEADER_LENGTH = 12;

    /** The protocol version of the remaining fields. For now, must be 1. */
    public final int protoVersion;

    /** The length of the applied delta segment, in bytes. */
    public final int appliedDeltaLength;
    public final int transformedDeltaLength;

    public DeltaHeader(int protoVersion, int appliedDeltaLength, int transformedDeltaLength) {
      this.protoVersion = protoVersion;
      this.appliedDeltaLength = appliedDeltaLength;
      this.transformedDeltaLength = transformedDeltaLength;
    }

    public void checkVersion() throws IOException {
      if (protoVersion != DELTA_PROTOCOL_VERSION) {
        throw new IOException("Invalid delta header");
      }
    }
  }

  /**
   * Opens a file delta collection.
   *
   * @param waveletName name of the wavelet to open
   * @param basePath base path of files
   * @return an open collection
   * @throws IOException
   */
  public static FileDeltaCollection open(WaveletName waveletName, String basePath)
      throws IOException {
    Preconditions.checkNotNull(waveletName, "null wavelet name");

    RandomAccessFile deltaFile = FileUtils.getOrCreateFile(deltasFile(basePath, waveletName));
    setOrCheckFileHeader(deltaFile);

    DeltaIndex index = new DeltaIndex(indexFile(basePath, waveletName));

    FileDeltaCollection collection = new FileDeltaCollection(waveletName, deltaFile, index);

    index.openForCollection(collection);
    collection.initializeEndVersionAndTruncateTrailingJunk();

    return collection;
  }

  /**
   * Delete the delta files from disk.
   *
   * @throws PersistenceException
   */
  public static void delete(WaveletName waveletName, String basePath) throws PersistenceException {
    String error = "";
    File deltas = deltasFile(basePath, waveletName);

    if (deltas.exists()) {
      if (!deltas.delete()) {
        error += "Could not delete deltas file: " + deltas.getAbsolutePath() + ". ";
      }
    }

    File index = indexFile(basePath, waveletName);
    if (index.exists()) {
      if (!index.delete()) {
        error += "Could not delete index file: " + index.getAbsolutePath();
      }
    }

    if (!error.isEmpty()) {
      throw new PersistenceException(error);
    }
  }

  /**
   * Create a new file delta collection for the given wavelet.
   *
   * @param waveletName name of the wavelet
   * @param deltaFile the file of deltas
   * @param index index into deltas
   */
  public FileDeltaCollection(WaveletName waveletName, RandomAccessFile deltaFile,
      DeltaIndex index) {
    this.waveletName = waveletName;
    this.file = deltaFile;
    this.index = index;
    this.isOpen = true;
    lifeCycle.start();
  }

  @Override
  public WaveletName getWaveletName() {
    return waveletName;
  }

  @Override
  public HashedVersion getEndVersion() {
    return endVersion;
  }

  @Override
  public WaveletDeltaRecord getDelta(long version) throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      return seekToRecord(version) ? readRecord() : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public WaveletDeltaRecord getDeltaByEndVersion(long version) throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      return seekToEndRecord(version) ? readRecord() : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public ByteStringMessage<ProtocolAppliedWaveletDelta> getAppliedDelta(long version)
      throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      return seekToRecord(version) ? readAppliedDeltaFromRecord() : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public TransformedWaveletDelta getTransformedDelta(long version) throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      return seekToRecord(version) ? readTransformedDeltaFromRecord() : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public HashedVersion getAppliedAtVersion(long version) throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      ByteStringMessage<ProtocolAppliedWaveletDelta> applied = getAppliedDelta(version);
      return (applied != null) ? AppliedDeltaUtil.getHashedVersionAppliedAt(applied) : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public long getDeltasInRange(long startVersion, long endVersion,
      Receiver<WaveletDeltaRecord> receiver) throws IOException {
    // NOTE(review): unlike the other accessors, this method does not take the
    // lifeCycle enter/leave guard — confirm whether that is intentional.
    checkIsOpen();
    if (this.endVersion == null) return 0;
    Preconditions.checkState(0 <= startVersion && startVersion < endVersion
        && endVersion <= this.endVersion.getVersion(), "Invalid delta range");
    long version = startVersion;
    while (seekToRecord(version) && version <= endVersion) {
      if (!receiver.put(readRecord())) {
        throw new IllegalStateException("Error processing deltas from file");
      }
      version++;
    }
    // BUG FIX: the original "return version--;" post-decremented a local whose value
    // was immediately returned, so the decrement had no effect. Keep the observable
    // behavior (returning "version") without the misleading no-op.
    // NOTE(review): if a delta *count* was intended, this should be
    // "version - startVersion" — TODO confirm against the DeltasAccess contract.
    return version;
  }

  @Override
  public HashedVersion getResultingVersion(long version) throws IOException {
    lifeCycle.enter();
    try {
      checkIsOpen();
      TransformedWaveletDelta transformed = getTransformedDelta(version);
      return (transformed != null) ? transformed.getResultingVersion() : null;
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public void close() throws IOException {
    file.close();
    index.close();
    endVersion = null;
    isOpen = false;
  }

  @Override
  public void append(Collection<WaveletDeltaRecord> deltas) throws PersistenceException {
    lifeCycle.enter();
    checkIsOpen();
    try {
      file.seek(file.length());

      WaveletDeltaRecord lastDelta = null;
      for (WaveletDeltaRecord delta : deltas) {
        index.addDelta(delta.getTransformedDelta().getAppliedAtVersion(),
            delta.getTransformedDelta().size(), file.getFilePointer());
        writeDelta(delta);
        lastDelta = delta;
      }

      // fsync() before returning.
      file.getChannel().force(true);
      // BUG FIX: guard against an empty collection — previously this dereferenced a
      // null lastDelta and threw NullPointerException.
      if (lastDelta != null) {
        endVersion = lastDelta.getTransformedDelta().getResultingVersion();
      }
    } catch (IOException e) {
      throw new PersistenceException(e);
    } finally {
      lifeCycle.leave();
    }
  }

  @Override
  public boolean isEmpty() {
    checkIsOpen();
    return index.length() == 0;
  }

  /**
   * Creates a new iterator to move over the positions of the deltas in the file.
   *
   * Each pair returned is ((version, numOperations), offset).
   * @throws IOException
   */
  Iterable<Pair<Pair<Long, Integer>, Long>> getOffsetsIterator() throws IOException {
    checkIsOpen();

    return new Iterable<Pair<Pair<Long, Integer>, Long>>() {
      @Override
      public Iterator<Pair<Pair<Long, Integer>, Long>> iterator() {
        return new Iterator<Pair<Pair<Long, Integer>, Long>>() {
          Pair<Pair<Long, Integer>, Long> nextRecord;
          long nextPosition = FILE_HEADER_LENGTH;

          @Override
          public void remove() {
            throw new UnsupportedOperationException();
          }

          @Override
          public Pair<Pair<Long, Integer>, Long> next() {
            Pair<Pair<Long, Integer>, Long> record = nextRecord;
            nextRecord = null;
            return record;
          }

          @Override
          public boolean hasNext() {
            // We're using hasNext to prime the next call to next(). This works because in practice
            // any call to next() is preceded by at least one call to hasNext().
            // We need to actually read the record here because hasNext() should return false
            // if there's any incomplete data at the end of the file.
            try {
              if (file.length() <= nextPosition) {
                // End of file.
                return false;
              }
            } catch (IOException e) {
              throw new RuntimeException("Could not get file position", e);
            }

            if (nextRecord == null) {
              // Read the next record
              try {
                file.seek(nextPosition);
                TransformedWaveletDelta transformed = readTransformedDeltaFromRecord();
                nextRecord = Pair.of(Pair.of(transformed.getAppliedAtVersion(),
                    transformed.size()), nextPosition);
                nextPosition = file.getFilePointer();
              } catch (IOException e) {
                // The next entry is invalid. There was probably a write error / crash.
                LOG.severe("Error reading delta file for " + waveletName + " starting at "
                    + nextPosition, e);
                return false;
              }
            }
            return true;
          }
        };
      }
    };
  }

  @VisibleForTesting
  static final File deltasFile(String basePath, WaveletName waveletName) {
    String waveletPathPrefix = FileUtils.waveletNameToPathSegment(waveletName);
    return new File(basePath, waveletPathPrefix + DELTAS_FILE_SUFFIX);
  }

  @VisibleForTesting
  static final File indexFile(String basePath, WaveletName waveletName) {
    String waveletPathPrefix = FileUtils.waveletNameToPathSegment(waveletName);
    return new File(basePath, waveletPathPrefix + INDEX_FILE_SUFFIX);
  }

  /**
   * Checks that a file has a valid deltas header, adding the header if the
   * file is shorter than the header.
   */
  private static void setOrCheckFileHeader(RandomAccessFile file) throws IOException {
    Preconditions.checkNotNull(file);
    file.seek(0);

    if (file.length() < FILE_HEADER_LENGTH) {
      // The file is new. Insert a header.
      file.write(FILE_MAGIC_BYTES);
      file.writeInt(FILE_PROTOCOL_VERSION);
    } else {
      byte[] magic = new byte[4];
      file.readFully(magic);
      if (!Arrays.equals(FILE_MAGIC_BYTES, magic)) {
        throw new IOException("Delta file magic bytes are incorrect");
      }

      int version = file.readInt();
      if (version != FILE_PROTOCOL_VERSION) {
        throw new IOException(String.format("File protocol version mismatch - expected %d got %d",
            FILE_PROTOCOL_VERSION, version));
      }
    }
  }

  private void checkIsOpen() {
    Preconditions.checkState(isOpen, "Delta collection closed");
  }

  /**
   * Seek to the start of a delta record. Returns false if the record doesn't exist.
   */
  private boolean seekToRecord(long version) throws IOException {
    Preconditions.checkArgument(version >= 0, "Version can't be negative");
    long offset = index.getOffsetForVersion(version);
    return seekTo(offset);
  }

  /**
   * Seek to the start of a delta record given its end version.
   * Returns false if the record doesn't exist.
   */
  private boolean seekToEndRecord(long version) throws IOException {
    Preconditions.checkArgument(version >= 0, "Version can't be negative");
    long offset = index.getOffsetForEndVersion(version);
    return seekTo(offset);
  }

  private boolean seekTo(long offset) throws IOException {
    if (offset == DeltaIndex.NO_RECORD_FOR_VERSION) {
      // There's no record for the specified version.
      return false;
    } else {
      file.seek(offset);
      return true;
    }
  }

  /**
   * Read a record and return it.
   */
  private WaveletDeltaRecord readRecord() throws IOException {
    DeltaHeader header = readDeltaHeader();

    ByteStringMessage<ProtocolAppliedWaveletDelta> appliedDelta =
        readAppliedDelta(header.appliedDeltaLength);
    TransformedWaveletDelta transformedDelta =
        readTransformedWaveletDelta(header.transformedDeltaLength);

    return new WaveletDeltaRecord(AppliedDeltaUtil.getHashedVersionAppliedAt(appliedDelta),
        appliedDelta, transformedDelta);
  }

  /**
   * Reads a record, and only parses & returns the applied data field.
   */
  private ByteStringMessage<ProtocolAppliedWaveletDelta> readAppliedDeltaFromRecord()
      throws IOException {
    DeltaHeader header = readDeltaHeader();

    ByteStringMessage<ProtocolAppliedWaveletDelta> appliedDelta =
        readAppliedDelta(header.appliedDeltaLength);
    file.skipBytes(header.transformedDeltaLength);

    return appliedDelta;
  }

  /**
   * Reads a record, and only parses & returns the transformed data field.
   */
  private TransformedWaveletDelta readTransformedDeltaFromRecord() throws IOException {
    DeltaHeader header = readDeltaHeader();

    file.skipBytes(header.appliedDeltaLength);
    TransformedWaveletDelta transformedDelta =
        readTransformedWaveletDelta(header.transformedDeltaLength);

    return transformedDelta;
  }

  // *** Low level data reading methods

  /** Read a header from the file. Does not move the file pointer before reading. */
  private DeltaHeader readDeltaHeader() throws IOException {
    int version = file.readInt();
    if (version != DELTA_PROTOCOL_VERSION) {
      throw new IOException("Delta header invalid");
    }

    int appliedDeltaLength = file.readInt();
    int transformedDeltaLength = file.readInt();

    DeltaHeader deltaHeader = new DeltaHeader(version, appliedDeltaLength, transformedDeltaLength);
    deltaHeader.checkVersion();

    // Verify the file size. Sum the lengths as longs so two large (possibly corrupt)
    // int lengths cannot overflow and defeat the check.
    long remaining = file.length() - file.getFilePointer();
    long missing = ((long) appliedDeltaLength + transformedDeltaLength) - remaining;
    if (missing > 0) {
      throw new IOException("File is corrupted, missing " + missing + " bytes");
    }

    return deltaHeader;
  }

  /**
   * Write a header to the current location in the file
   */
  private void writeDeltaHeader(DeltaHeader header) throws IOException {
    file.writeInt(header.protoVersion);
    file.writeInt(header.appliedDeltaLength);
    file.writeInt(header.transformedDeltaLength);
  }

  /**
   * Read the applied delta at the current file position. After method call,
   * file position is directly after applied delta field.
   */
  private ByteStringMessage<ProtocolAppliedWaveletDelta> readAppliedDelta(int length)
      throws IOException {
    if (length == 0) {
      return null;
    }

    byte[] bytes = new byte[length];
    file.readFully(bytes);
    try {
      return ByteStringMessage.parseProtocolAppliedWaveletDelta(ByteString.copyFrom(bytes));
    } catch (InvalidProtocolBufferException e) {
      throw new IOException(e);
    }
  }

  /**
   * Write an applied delta to the current position in the file. Returns number of bytes written.
   */
  private int writeAppliedDelta(ByteStringMessage<ProtocolAppliedWaveletDelta> delta)
      throws IOException {
    if (delta != null) {
      byte[] bytes = delta.getByteArray();
      file.write(bytes);
      return bytes.length;
    } else {
      return 0;
    }
  }

  /**
   * Read a {@link TransformedWaveletDelta} from the current location in the file.
   */
  private TransformedWaveletDelta readTransformedWaveletDelta(int transformedDeltaLength)
      throws IOException {
    if (transformedDeltaLength < 0) {
      throw new IOException("Invalid delta length");
    }

    byte[] bytes = new byte[transformedDeltaLength];
    file.readFully(bytes);
    ProtoTransformedWaveletDelta delta;
    try {
      delta = ProtoTransformedWaveletDelta.parseFrom(bytes);
    } catch (InvalidProtocolBufferException e) {
      throw new IOException(e);
    }
    return ProtoDeltaStoreDataSerializer.deserialize(delta);
  }

  /**
   * Write a {@link TransformedWaveletDelta} to the file at the current location.
   * @return length of written data
   */
  private int writeTransformedWaveletDelta(TransformedWaveletDelta delta) throws IOException {
    long startingPosition = file.getFilePointer();
    ProtoTransformedWaveletDelta protoDelta = ProtoDeltaStoreDataSerializer.serialize(delta);
    OutputStream stream = Channels.newOutputStream(file.getChannel());
    protoDelta.writeTo(stream);
    return (int) (file.getFilePointer() - startingPosition);
  }

  /**
   * Write a delta to the file. Does not move the file pointer before writing. Returns number of
   * bytes written.
   */
  private long writeDelta(WaveletDeltaRecord delta) throws IOException {
    // We'll write zeros in place of the header and come back & write it at the end.
    long headerPointer = file.getFilePointer();
    file.write(new byte[DeltaHeader.HEADER_LENGTH]);

    int appliedLength = writeAppliedDelta(delta.getAppliedDelta());
    int transformedLength = writeTransformedWaveletDelta(delta.getTransformedDelta());

    long endPointer = file.getFilePointer();
    file.seek(headerPointer);
    writeDeltaHeader(new DeltaHeader(DELTA_PROTOCOL_VERSION, appliedLength, transformedLength));
    file.seek(endPointer);

    return endPointer - headerPointer;
  }

  /**
   * Reads the last complete record in the deltas file and truncates any trailing junk.
   */
  private void initializeEndVersionAndTruncateTrailingJunk() throws IOException {
    long numRecords = index.length();
    if (numRecords >= 1) {
      endVersion = getDeltaByEndVersion(numRecords).getResultingVersion();
    } else {
      endVersion = null;
    }
    // The file's position should be at the end. Truncate any
    // trailing junk such as from a partially completed write.
    file.setLength(file.getFilePointer());
  }

  @Override
  public long getAllDeltas(Receiver<WaveletDeltaRecord> receiver) throws IOException {
    // BUG FIX: an empty collection has endVersion == null; previously this threw
    // NullPointerException instead of delivering nothing.
    if (endVersion == null) {
      return 0;
    }
    return getDeltasInRange(0, endVersion.getVersion(), receiver);
  }

  @Override
  public Snapshot loadSnapshot() throws PersistenceException {
    // Not supported!
    return null;
  }

  @Override
  public void storeSnapshot(WaveletData waveletData)
      throws PersistenceException {
    // No-op
  }

  @Override
  public WaveletDeltaRecord getLastDelta() throws IOException {
    // BUG FIX: an empty collection has endVersion == null; previously this threw
    // NullPointerException instead of returning null (no last delta).
    return endVersion == null ? null : getDeltaByEndVersion(endVersion.getVersion());
  }

  @Override
  public WaveletContributions loadContributions() throws PersistenceException {
    // Not supported!
    return null;
  }

  @Override
  public WaveletContributions loadContributionsForVersion(long version)
      throws PersistenceException {
    // Not supported!
    return null;
  }

  @Override
  public void storeContributions(WaveletContributions contributions) throws PersistenceException {
    // Not supported!
  }
}
/*
 * Pathfinder
 * Core application for storage and analysis of IT landscape data
 *
 * OpenAPI spec version: 5.0.813
 *
 *
 * NOTE: This class is auto generated by the swagger code generator program.
 * https://github.com/swagger-api/swagger-codegen.git
 * Do not edit the class manually.
 */

package net.leanix.api.models;

import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import net.leanix.api.models.ApiError;
import net.leanix.api.models.AuthorizationRole;

/**
 * AuthorizationRolesResponse
 *
 * <p>Response envelope for an authorization-roles query: a status/type/message
 * triple, a list of {@link ApiError}s, a total count, and the role payload.
 * Generated by swagger-codegen — regenerate from the OpenAPI spec instead of
 * editing by hand. All fluent setters return {@code this} for chaining.
 */
public class AuthorizationRolesResponse {
  @JsonProperty("status")
  private String status = null;

  @JsonProperty("type")
  private String type = null;

  @JsonProperty("message")
  private String message = null;

  // Initialized to an empty mutable list so addErrorsItem() works before any setter call.
  @JsonProperty("errors")
  private List<ApiError> errors = new ArrayList<ApiError>();

  @JsonProperty("total")
  private Long total = null;

  // Initialized to an empty mutable list so addDataItem() works before any setter call.
  @JsonProperty("data")
  private List<AuthorizationRole> data = new ArrayList<AuthorizationRole>();

  public AuthorizationRolesResponse status(String status) {
    this.status = status;
    return this;
  }

   /**
   * Get status
   * @return status
  **/
  @ApiModelProperty(example = "null", value = "")
  public String getStatus() {
    return status;
  }

  public void setStatus(String status) {
    this.status = status;
  }

  public AuthorizationRolesResponse type(String type) {
    this.type = type;
    return this;
  }

   /**
   * Get type
   * @return type
  **/
  @ApiModelProperty(example = "null", value = "")
  public String getType() {
    return type;
  }

  public void setType(String type) {
    this.type = type;
  }

  public AuthorizationRolesResponse message(String message) {
    this.message = message;
    return this;
  }

   /**
   * Get message
   * @return message
  **/
  @ApiModelProperty(example = "null", value = "")
  public String getMessage() {
    return message;
  }

  public void setMessage(String message) {
    this.message = message;
  }

  public AuthorizationRolesResponse errors(List<ApiError> errors) {
    this.errors = errors;
    return this;
  }

  // Appends one error to the current list (NPE if errors was set to null via setter).
  public AuthorizationRolesResponse addErrorsItem(ApiError errorsItem) {
    this.errors.add(errorsItem);
    return this;
  }

   /**
   * Get errors
   * @return errors
  **/
  @ApiModelProperty(example = "null", value = "")
  public List<ApiError> getErrors() {
    return errors;
  }

  public void setErrors(List<ApiError> errors) {
    this.errors = errors;
  }

  public AuthorizationRolesResponse total(Long total) {
    this.total = total;
    return this;
  }

   /**
   * Get total
   * @return total
  **/
  @ApiModelProperty(example = "null", value = "")
  public Long getTotal() {
    return total;
  }

  public void setTotal(Long total) {
    this.total = total;
  }

  public AuthorizationRolesResponse data(List<AuthorizationRole> data) {
    this.data = data;
    return this;
  }

  // Appends one role to the current list (NPE if data was set to null via setter).
  public AuthorizationRolesResponse addDataItem(AuthorizationRole dataItem) {
    this.data.add(dataItem);
    return this;
  }

   /**
   * Get data
   * @return data
  **/
  @ApiModelProperty(example = "null", value = "")
  public List<AuthorizationRole> getData() {
    return data;
  }

  public void setData(List<AuthorizationRole> data) {
    this.data = data;
  }

  // Field-wise equality over all six properties.
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    AuthorizationRolesResponse authorizationRolesResponse = (AuthorizationRolesResponse) o;
    return Objects.equals(this.status, authorizationRolesResponse.status) &&
        Objects.equals(this.type, authorizationRolesResponse.type) &&
        Objects.equals(this.message, authorizationRolesResponse.message) &&
        Objects.equals(this.errors, authorizationRolesResponse.errors) &&
        Objects.equals(this.total, authorizationRolesResponse.total) &&
        Objects.equals(this.data, authorizationRolesResponse.data);
  }

  @Override
  public int hashCode() {
    return Objects.hash(status, type, message, errors, total, data);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class AuthorizationRolesResponse {\n");

    sb.append("    status: ").append(toIndentedString(status)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("    message: ").append(toIndentedString(message)).append("\n");
    sb.append("    errors: ").append(toIndentedString(errors)).append("\n");
    sb.append("    total: ").append(toIndentedString(total)).append("\n");
    sb.append("    data: ").append(toIndentedString(data)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.snapshots; import com.carrotsearch.hppc.IntOpenHashSet; import com.carrotsearch.hppc.IntSet; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.*; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.cluster.metadata.RestoreMetaData.ShardRestoreStatus; import org.elasticsearch.cluster.routing.*; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.settings.ClusterDynamicSettings; import org.elasticsearch.cluster.settings.DynamicSettings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.repositories.Repository;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;

import java.io.IOException;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;

import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
import static org.elasticsearch.cluster.metadata.MetaDataIndexStateService.INDEX_CLOSED_BLOCK;

/**
 * Service responsible for restoring snapshots
 * <p/>
 * Restore operation is performed in several stages.
 * <p/>
 * First {@link #restoreSnapshot(RestoreRequest, org.elasticsearch.action.ActionListener)}
 * method reads information about snapshot and metadata from repository. In update cluster state task it checks restore
 * preconditions, restores global state if needed, creates {@link RestoreMetaData} record with list of shards that needs
 * to be restored and adds this shard to the routing table using {@link RoutingTable.Builder#addAsRestore(IndexMetaData, RestoreSource)}
 * method.
* <p/> * Individual shards are getting restored as part of normal recovery process in * {@link org.elasticsearch.index.gateway.IndexShardGatewayService#recover(boolean, org.elasticsearch.index.gateway.IndexShardGatewayService.RecoveryListener)} * method, which detects that shard should be restored from snapshot rather than recovered from gateway by looking * at the {@link org.elasticsearch.cluster.routing.ShardRouting#restoreSource()} property. If this property is not null * {@code recover} method uses {@link org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService#restore(org.elasticsearch.indices.recovery.RecoveryState)} * method to start shard restore process. * <p/> * At the end of the successful restore process {@code IndexShardSnapshotAndRestoreService} calls {@link #indexShardRestoreCompleted(SnapshotId, ShardId)}, * which updates {@link RestoreMetaData} in cluster state or removes it when all shards are completed. In case of * restore failure a normal recovery fail-over process kicks in. 
*/
public class RestoreService extends AbstractComponent implements ClusterStateListener {

    /** Internal transport action used by data nodes to report shard restore status to the master. */
    public static final String UPDATE_RESTORE_ACTION_NAME = "internal:cluster/snapshot/update_restore";

    // Settings that may never be overridden on restore (they define the index's identity/layout).
    private static final ImmutableSet<String> UNMODIFIABLE_SETTINGS = ImmutableSet.of(
            SETTING_NUMBER_OF_SHARDS,
            SETTING_VERSION_CREATED,
            SETTING_UUID,
            SETTING_CREATION_DATE);

    // It's OK to change some settings, but we shouldn't allow simply removing them
    private static final ImmutableSet<String> UNREMOVABLE_SETTINGS = ImmutableSet.<String>builder()
            .addAll(UNMODIFIABLE_SETTINGS)
            .add(SETTING_NUMBER_OF_REPLICAS)
            .add(SETTING_AUTO_EXPAND_REPLICAS)
            .build();

    private final ClusterService clusterService;
    private final RepositoriesService repositoriesService;
    private final TransportService transportService;
    private final AllocationService allocationService;
    private final MetaDataCreateIndexService createIndexService;
    private final DynamicSettings dynamicSettings;

    // Restore-completion listeners; CopyOnWriteArrayList so notification can iterate safely
    // while listeners are added/removed concurrently.
    private final CopyOnWriteArrayList<ActionListener<RestoreCompletionResponse>> listeners = new CopyOnWriteArrayList<>();

    /**
     * Wires the service into the cluster: registers the internal status-update transport
     * handler and subscribes itself as a {@link ClusterStateListener}.
     */
    @Inject
    public RestoreService(Settings settings, ClusterService clusterService, RepositoriesService repositoriesService, TransportService transportService, AllocationService allocationService, MetaDataCreateIndexService createIndexService, @ClusterDynamicSettings DynamicSettings dynamicSettings) {
        super(settings);
        this.clusterService = clusterService;
        this.repositoriesService = repositoriesService;
        this.transportService = transportService;
        this.allocationService = allocationService;
        this.createIndexService = createIndexService;
        this.dynamicSettings = dynamicSettings;
        transportService.registerHandler(UPDATE_RESTORE_ACTION_NAME, new UpdateRestoreStateRequestHandler());
        clusterService.add(this);
    }

    /**
     * Restores snapshot specified in the restore request.
     *
     * @param request  restore request
     * @param listener restore listener; receives the {@link RestoreInfo} (possibly null if
     *                 shards are still restoring) or the failure
     */
    public void restoreSnapshot(final RestoreRequest request, final ActionListener<RestoreInfo> listener) {
        try {
            // Read snapshot info and metadata from the repository
            Repository repository = repositoriesService.repository(request.repository());
            final SnapshotId snapshotId = new SnapshotId(request.repository(), request.name());
            final Snapshot snapshot = repository.readSnapshot(snapshotId);
            ImmutableList<String> filteredIndices = SnapshotUtils.filterIndices(snapshot.indices(), request.indices(), request.indicesOptions());
            final MetaData metaData = repository.readSnapshotMetaData(snapshotId, filteredIndices);

            // Make sure that we can restore from this snapshot
            validateSnapshotRestorable(snapshotId, snapshot);

            // Find list of indices that we need to restore (keyed by renamed/target index name)
            final Map<String, String> renamedIndices = renamedIndices(request, filteredIndices);

            // Now we can start the actual restore process by adding shards to be recovered in the cluster state
            // and updating cluster metadata (global and index) as needed
            clusterService.submitStateUpdateTask(request.cause(), new TimeoutClusterStateUpdateTask() {

                // Set only when there is nothing left to restore; published via clusterStateProcessed.
                RestoreInfo restoreInfo = null;

                @Override
                public ClusterState execute(ClusterState currentState) {
                    // Check if another restore process is already running - cannot run two restore
                    // processes at the same time
                    RestoreMetaData restoreMetaData = currentState.metaData().custom(RestoreMetaData.TYPE);
                    if (restoreMetaData != null && !restoreMetaData.entries().isEmpty()) {
                        throw new ConcurrentSnapshotExecutionException(snapshotId, "Restore process is already running in this cluster");
                    }

                    // Updating cluster state
                    MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                    ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
                    RoutingTable.Builder rtBuilder = RoutingTable.builder(currentState.routingTable());
                    final ImmutableMap<ShardId, RestoreMetaData.ShardRestoreStatus> shards;
                    Set<String> aliases = newHashSet();
                    if (!renamedIndices.isEmpty()) {
                        // We have some indices to restore
                        ImmutableMap.Builder<ShardId, RestoreMetaData.ShardRestoreStatus> shardsBuilder = ImmutableMap.builder();
                        for (Map.Entry<String, String> indexEntry : renamedIndices.entrySet()) {
                            String index = indexEntry.getValue();
                            boolean partial = checkPartial(index);
                            RestoreSource restoreSource = new RestoreSource(snapshotId, index);
                            String renamedIndex = indexEntry.getKey();
                            IndexMetaData snapshotIndexMetaData = metaData.index(index);
                            snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings);
                            // Check that the index is closed or doesn't exist
                            IndexMetaData currentIndexMetaData = currentState.metaData().index(renamedIndex);
                            IntSet ignoreShards = new IntOpenHashSet();
                            if (currentIndexMetaData == null) {
                                // Index doesn't exist - create it and start recovery
                                // Make sure that the index we are about to create has a valid name
                                createIndexService.validateIndexName(renamedIndex, currentState);
                                IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN).index(renamedIndex);
                                if (!request.includeAliases() && !snapshotIndexMetaData.aliases().isEmpty()) {
                                    // Remove all aliases - they shouldn't be restored
                                    indexMdBuilder.removeAllAliases();
                                } else {
                                    for (ObjectCursor<String> alias : snapshotIndexMetaData.aliases().keys()) {
                                        aliases.add(alias.value);
                                    }
                                }
                                IndexMetaData updatedIndexMetaData = indexMdBuilder.build();
                                if (partial) {
                                    // Shards that failed to snapshot are skipped on a partial restore.
                                    populateIgnoredShards(index, ignoreShards);
                                }
                                rtBuilder.addAsNewRestore(updatedIndexMetaData, restoreSource, ignoreShards);
                                mdBuilder.put(updatedIndexMetaData, true);
                            } else {
                                validateExistingIndex(currentIndexMetaData, snapshotIndexMetaData, renamedIndex, partial);
                                // Index exists and it's closed - open it in metadata and start recovery
                                IndexMetaData.Builder indexMdBuilder = IndexMetaData.builder(snapshotIndexMetaData).state(IndexMetaData.State.OPEN);
                                // Bump the metadata version past the existing index's version.
                                indexMdBuilder.version(Math.max(snapshotIndexMetaData.version(), currentIndexMetaData.version() + 1));
                                if (!request.includeAliases()) {
                                    // Remove all snapshot aliases
                                    if (!snapshotIndexMetaData.aliases().isEmpty()) {
                                        indexMdBuilder.removeAllAliases();
                                    }
                                    // Add existing aliases
                                    for (ObjectCursor<AliasMetaData> alias : currentIndexMetaData.aliases().values()) {
                                        indexMdBuilder.putAlias(alias.value);
                                    }
                                } else {
                                    for (ObjectCursor<String> alias : snapshotIndexMetaData.aliases().keys()) {
                                        aliases.add(alias.value);
                                    }
                                }
                                IndexMetaData updatedIndexMetaData = indexMdBuilder.index(renamedIndex).build();
                                rtBuilder.addAsRestore(updatedIndexMetaData, restoreSource);
                                blocks.removeIndexBlock(renamedIndex, INDEX_CLOSED_BLOCK);
                                mdBuilder.put(updatedIndexMetaData, true);
                            }
                            // Record per-shard restore status; ignored shards are marked FAILURE up front.
                            for (int shard = 0; shard < snapshotIndexMetaData.getNumberOfShards(); shard++) {
                                if (!ignoreShards.contains(shard)) {
                                    shardsBuilder.put(new ShardId(renamedIndex, shard), new RestoreMetaData.ShardRestoreStatus(clusterService.state().nodes().localNodeId()));
                                } else {
                                    shardsBuilder.put(new ShardId(renamedIndex, shard), new RestoreMetaData.ShardRestoreStatus(clusterService.state().nodes().localNodeId(), RestoreMetaData.State.FAILURE));
                                }
                            }
                        }
                        shards = shardsBuilder.build();
                        RestoreMetaData.Entry restoreEntry = new RestoreMetaData.Entry(snapshotId, RestoreMetaData.State.INIT, ImmutableList.copyOf(renamedIndices.keySet()), shards);
                        mdBuilder.putCustom(RestoreMetaData.TYPE, new RestoreMetaData(restoreEntry));
                    } else {
                        shards = ImmutableMap.of();
                    }

                    checkAliasNameConflicts(renamedIndices, aliases);

                    // Restore global state if needed
                    restoreGlobalStateIfRequested(mdBuilder);

                    if (completed(shards)) {
                        // We don't have any indices to restore - we are done
                        restoreInfo = new RestoreInfo(request.name(), ImmutableList.copyOf(renamedIndices.keySet()),
                                shards.size(), shards.size() - failedShards(shards));
                    }

                    ClusterState updatedState = ClusterState.builder(currentState).metaData(mdBuilder).blocks(blocks).routingTable(rtBuilder).build();
                    RoutingAllocation.Result routingResult = allocationService.reroute(ClusterState.builder(updatedState).routingTable(rtBuilder).build());
                    return ClusterState.builder(updatedState).routingResult(routingResult).build();
                }

                // Restoring an index under a name that collides with an alias from the same
                // restore would make the result unaddressable - reject it.
                private void checkAliasNameConflicts(Map<String, String> renamedIndices, Set<String> aliases) {
                    for (Map.Entry<String, String> renamedIndex : renamedIndices.entrySet()) {
                        if (aliases.contains(renamedIndex.getKey())) {
                            throw new SnapshotRestoreException(snapshotId, "cannot rename index [" + renamedIndex.getValue() + "] into [" + renamedIndex.getKey() + "] because of conflict with an alias with the same name");
                        }
                    }
                }

                // Collects the shard ids of this index that failed during the snapshot.
                private void populateIgnoredShards(String index, IntSet ignoreShards) {
                    for (SnapshotShardFailure failure : snapshot.shardFailures()) {
                        if (index.equals(failure.index())) {
                            ignoreShards.add(failure.shardId());
                        }
                    }
                }

                // Returns true if the index was only partially snapshotted AND a partial restore
                // was requested; throws if it was partial but partial restore is not allowed.
                private boolean checkPartial(String index) {
                    // Make sure that index was fully snapshotted
                    if (failed(snapshot, index)) {
                        if (request.partial()) {
                            return true;
                        } else {
                            throw new SnapshotRestoreException(snapshotId, "index [" + index + "] wasn't fully snapshotted - cannot restore");
                        }
                    } else {
                        return false;
                    }
                }

                private void validateExistingIndex(IndexMetaData currentIndexMetaData, IndexMetaData snapshotIndexMetaData, String renamedIndex, boolean partial) {
                    // Index exists - check that it's closed
                    if (currentIndexMetaData.state() != IndexMetaData.State.CLOSE) {
                        // TODO: Enable restore for open indices
                        throw new SnapshotRestoreException(snapshotId, "cannot restore index [" + renamedIndex + "] because it's open");
                    }
                    // Index exists - partial restore into an existing index is not supported
                    if (partial) {
                        throw new SnapshotRestoreException(snapshotId, "cannot restore partial index [" + renamedIndex + "] because such index already exists");
                    }
                    // Make sure that the number of shards is the same. That's the only thing that we cannot change
                    if (currentIndexMetaData.getNumberOfShards() != snapshotIndexMetaData.getNumberOfShards()) {
                        throw new SnapshotRestoreException(snapshotId, "cannot restore index [" + renamedIndex + "] with [" + currentIndexMetaData.getNumberOfShards() +
                                "] shard from snapshot with [" + snapshotIndexMetaData.getNumberOfShards() + "] shards");
                    }
                }

                /**
                 * Optionally updates index settings in indexMetaData by removing settings listed in ignoreSettings and
                 * merging them with settings in changeSettings.
                 */
                private IndexMetaData updateIndexSettings(IndexMetaData indexMetaData, Settings changeSettings, String[] ignoreSettings) {
                    if (changeSettings.names().isEmpty() && ignoreSettings.length == 0) {
                        return indexMetaData;
                    }
                    IndexMetaData.Builder builder = IndexMetaData.builder(indexMetaData);
                    Map<String, String> settingsMap = newHashMap(indexMetaData.settings().getAsMap());
                    List<String> simpleMatchPatterns = newArrayList();
                    // Exact names are removed immediately; wildcard patterns are applied in a second pass.
                    for (String ignoredSetting : ignoreSettings) {
                        if (!Regex.isSimpleMatchPattern(ignoredSetting)) {
                            if (UNREMOVABLE_SETTINGS.contains(ignoredSetting)) {
                                throw new SnapshotRestoreException(snapshotId, "cannot remove setting [" + ignoredSetting + "] on restore");
                            } else {
                                settingsMap.remove(ignoredSetting);
                            }
                        } else {
                            simpleMatchPatterns.add(ignoredSetting);
                        }
                    }
                    if (!simpleMatchPatterns.isEmpty()) {
                        String[] removePatterns = simpleMatchPatterns.toArray(new String[simpleMatchPatterns.size()]);
                        Iterator<Map.Entry<String, String>> iterator = settingsMap.entrySet().iterator();
                        while (iterator.hasNext()) {
                            Map.Entry<String, String> entry = iterator.next();
                            // Unremovable settings silently survive wildcard matches (no exception here).
                            if (UNREMOVABLE_SETTINGS.contains(entry.getKey()) == false) {
                                if (Regex.simpleMatch(removePatterns, entry.getKey())) {
                                    iterator.remove();
                                }
                            }
                        }
                    }
                    for (Map.Entry<String, String> entry : changeSettings.getAsMap().entrySet()) {
                        if (UNMODIFIABLE_SETTINGS.contains(entry.getKey())) {
                            throw new SnapshotRestoreException(snapshotId, "cannot modify setting [" + entry.getKey() + "] on restore");
                        } else {
                            settingsMap.put(entry.getKey(), entry.getValue());
                        }
                    }
                    return builder.settings(ImmutableSettings.builder().put(settingsMap)).build();
                }

                private void restoreGlobalStateIfRequested(MetaData.Builder mdBuilder) {
                    if (request.includeGlobalState()) {
                        if (metaData.persistentSettings() != null) {
                            boolean changed = false;
                            ImmutableSettings.Builder persistentSettings = ImmutableSettings.settingsBuilder().put();
                            // Only restore settings that are dynamically updateable and validate cleanly;
                            // everything else is logged and skipped.
                            for (Map.Entry<String, String> entry : metaData.persistentSettings().getAsMap().entrySet()) {
                                if (dynamicSettings.isDynamicOrLoggingSetting(entry.getKey())) {
                                    String error = dynamicSettings.validateDynamicSetting(entry.getKey(), entry.getValue());
                                    if (error == null) {
                                        persistentSettings.put(entry.getKey(), entry.getValue());
                                        changed = true;
                                    } else {
                                        logger.warn("ignoring persistent setting [{}], [{}]", entry.getKey(), error);
                                    }
                                } else {
                                    logger.warn("ignoring persistent setting [{}], not dynamically updateable", entry.getKey());
                                }
                            }
                            if (changed) {
                                mdBuilder.persistentSettings(persistentSettings.build());
                            }
                        }
                        if (metaData.templates() != null) {
                            // TODO: Should all existing templates be deleted first?
                            for (ObjectCursor<IndexTemplateMetaData> cursor : metaData.templates().values()) {
                                mdBuilder.put(cursor.value);
                            }
                        }
                        if (metaData.customs() != null) {
                            for (ObjectObjectCursor<String, MetaData.Custom> cursor : metaData.customs()) {
                                if (!RepositoriesMetaData.TYPE.equals(cursor.key)) {
                                    // Don't restore repositories while we are working with them
                                    // TODO: Should we restore them at the end?
                                    mdBuilder.putCustom(cursor.key, cursor.value);
                                }
                            }
                        }
                    }
                }

                @Override
                public void onFailure(String source, Throwable t) {
                    logger.warn("[{}] failed to restore snapshot", t, snapshotId);
                    listener.onFailure(t);
                }

                @Override
                public TimeValue timeout() {
                    return request.masterNodeTimeout();
                }

                @Override
                public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                    // restoreInfo is null when shards are still restoring asynchronously.
                    listener.onResponse(restoreInfo);
                }
            });
        } catch (Throwable e) {
            logger.warn("[{}][{}] failed to restore snapshot", e, request.repository(), request.name());
            listener.onFailure(e);
        }
    }

    /**
     * This method is used by {@link org.elasticsearch.index.snapshots.IndexShardSnapshotAndRestoreService} to notify
     * {@code RestoreService} about shard restore completion.
     *
     * @param snapshotId snapshot id
     * @param shardId    shard id
     */
    public void indexShardRestoreCompleted(SnapshotId snapshotId, ShardId shardId) {
        logger.trace("[{}] successfully restored shard [{}]", snapshotId, shardId);
        UpdateIndexShardRestoreStatusRequest request = new UpdateIndexShardRestoreStatusRequest(snapshotId, shardId,
                new ShardRestoreStatus(clusterService.state().nodes().localNodeId(), RestoreMetaData.State.SUCCESS));
        // Apply locally when we are the master, otherwise ship the status to the master node.
        if (clusterService.state().nodes().localNodeMaster()) {
            innerUpdateRestoreState(request);
        } else {
            transportService.sendRequest(clusterService.state().nodes().masterNode(), UPDATE_RESTORE_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME);
        }
    }

    /** Immutable pairing of a snapshot id and its final restore result, delivered to listeners. */
    public final static class RestoreCompletionResponse {
        private final SnapshotId snapshotId;
        private final RestoreInfo restoreInfo;

        private RestoreCompletionResponse(SnapshotId snapshotId, RestoreInfo restoreInfo) {
            this.snapshotId = snapshotId;
            this.restoreInfo = restoreInfo;
        }

        public SnapshotId getSnapshotId() {
            return snapshotId;
        }

        public RestoreInfo getRestoreInfo() {
            return restoreInfo;
        }
    }

    /**
     * Updates shard restore record in the cluster state.
     *
     * @param request update shard status request
     */
    private void innerUpdateRestoreState(final UpdateIndexShardRestoreStatusRequest request) {
        clusterService.submitStateUpdateTask("update snapshot state", new ProcessedClusterStateUpdateTask() {

            // Populated in execute() only when this update completes the whole restore;
            // read later in clusterStateProcessed().
            private RestoreInfo restoreInfo = null;
            private Map<ShardId, ShardRestoreStatus> shards = null;

            @Override
            public ClusterState execute(ClusterState currentState) {
                MetaData metaData = currentState.metaData();
                MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData());
                RestoreMetaData restore = metaData.custom(RestoreMetaData.TYPE);
                if (restore != null) {
                    boolean changed = false;
                    boolean found = false;
                    ArrayList<RestoreMetaData.Entry> entries = newArrayList();
                    for (RestoreMetaData.Entry entry : restore.entries()) {
                        if (entry.snapshotId().equals(request.snapshotId())) {
                            // At most one entry per snapshot is expected in RestoreMetaData.
                            assert !found;
                            found = true;
                            Map<ShardId, ShardRestoreStatus> shards = newHashMap(entry.shards());
                            logger.trace("[{}] Updating shard [{}] with status [{}]", request.snapshotId(), request.shardId(), request.status().state());
                            shards.put(request.shardId(), request.status());
                            if (!completed(shards)) {
                                entries.add(new RestoreMetaData.Entry(entry.snapshotId(), RestoreMetaData.State.STARTED, entry.indices(), ImmutableMap.copyOf(shards)));
                            } else {
                                // All shards finished: drop the entry from RestoreMetaData and
                                // remember the result for listener notification.
                                logger.info("restore [{}] is done", request.snapshotId());
                                restoreInfo = new RestoreInfo(entry.snapshotId().getSnapshot(), entry.indices(), shards.size(), shards.size() - failedShards(shards));
                                this.shards = shards;
                            }
                            changed = true;
                        } else {
                            entries.add(entry);
                        }
                    }
                    if (changed) {
                        restore = new RestoreMetaData(entries.toArray(new RestoreMetaData.Entry[entries.size()]));
                        mdBuilder.putCustom(RestoreMetaData.TYPE, restore);
                        return ClusterState.builder(currentState).metaData(mdBuilder).build();
                    }
                }
                return currentState;
            }

            @Override
            public void onFailure(String source, @Nullable Throwable t) {
                logger.warn("[{}][{}] failed to update snapshot status to [{}]", t, request.snapshotId(), request.shardId(), request.status());
            }

            @Override
            public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
                if (restoreInfo != null) {
                    RoutingTable routingTable = newState.getRoutingTable();
                    final List<ShardId> waitForStarted = newArrayList();
                    // Delay listener notification until every successfully restored primary is active.
                    for (Map.Entry<ShardId, ShardRestoreStatus> shard : shards.entrySet()) {
                        if (shard.getValue().state() == RestoreMetaData.State.SUCCESS ) {
                            ShardId shardId = shard.getKey();
                            ShardRouting shardRouting = findPrimaryShard(routingTable, shardId);
                            if (shardRouting != null && !shardRouting.active()) {
                                logger.trace("[{}][{}] waiting for the shard to start", request.snapshotId(), shardId);
                                waitForStarted.add(shardId);
                            }
                        }
                    }
                    if (waitForStarted.isEmpty()) {
                        notifyListeners();
                    } else {
                        // Register a temporary listener that removes itself once all pending
                        // shards have either started or disappeared (index deleted).
                        clusterService.addLast(new ClusterStateListener() {
                            @Override
                            public void clusterChanged(ClusterChangedEvent event) {
                                if (event.routingTableChanged()) {
                                    RoutingTable routingTable = event.state().getRoutingTable();
                                    for (Iterator<ShardId> iterator = waitForStarted.iterator(); iterator.hasNext();) {
                                        ShardId shardId = iterator.next();
                                        ShardRouting shardRouting = findPrimaryShard(routingTable, shardId);
                                        // Shard disappeared (index deleted) or became active
                                        if (shardRouting == null || shardRouting.active()) {
                                            iterator.remove();
                                            logger.trace("[{}][{}] shard disappeared or started - removing", request.snapshotId(), shardId);
                                        }
                                    }
                                }
                                if (waitForStarted.isEmpty()) {
                                    notifyListeners();
                                    clusterService.remove(this);
                                }
                            }
                        });
                    }
                }
            }

            // Returns the primary ShardRouting for the given shard, or null if the index
            // or shard is no longer in the routing table.
            private ShardRouting findPrimaryShard(RoutingTable routingTable, ShardId shardId) {
                IndexRoutingTable indexRoutingTable = routingTable.index(shardId.getIndex());
                if (indexRoutingTable != null) {
                    IndexShardRoutingTable indexShardRoutingTable = indexRoutingTable.shard(shardId.id());
                    if (indexShardRoutingTable != null) {
                        return indexShardRoutingTable.primaryShard();
                    }
                }
                return null;
            }

            // Notifies all registered completion listeners; a misbehaving listener must not
            // prevent the others from being called.
            private void notifyListeners() {
                for (ActionListener<RestoreCompletionResponse> listener : listeners) {
                    try {
                        listener.onResponse(new RestoreCompletionResponse(request.snapshotId, restoreInfo));
                    } catch (Throwable e) {
                        logger.warn("failed to update snapshot status for [{}]", e, listener);
                    }
                }
            }
        });
    }

    // True when every shard in the map has reached a terminal (completed) state.
    private boolean completed(Map<ShardId, RestoreMetaData.ShardRestoreStatus> shards) {
        for (RestoreMetaData.ShardRestoreStatus status : shards.values()) {
            if (!status.state().completed()) {
                return false;
            }
        }
        return true;
    }

    // Counts shards whose restore ended in FAILURE.
    private int failedShards(Map<ShardId, RestoreMetaData.ShardRestoreStatus> shards) {
        int failedShards = 0;
        for (RestoreMetaData.ShardRestoreStatus status : shards.values()) {
            if (status.state() == RestoreMetaData.State.FAILURE) {
                failedShards++;
            }
        }
        return failedShards;
    }

    // Maps target (possibly renamed) index name -> snapshot index name, rejecting
    // rename patterns that collapse two snapshot indices onto the same target.
    private Map<String, String> renamedIndices(RestoreRequest request, ImmutableList<String> filteredIndices) {
        Map<String, String> renamedIndices = newHashMap();
        for (String index : filteredIndices) {
            String renamedIndex = index;
            if (request.renameReplacement() != null && request.renamePattern() != null) {
                renamedIndex = index.replaceAll(request.renamePattern(), request.renameReplacement());
            }
            String previousIndex = renamedIndices.put(renamedIndex, index);
            if (previousIndex != null) {
                throw new SnapshotRestoreException(new SnapshotId(request.repository(), request.name()),
                        "indices [" + index + "] and [" + previousIndex + "] are renamed into the same index [" + renamedIndex + "]");
            }
        }
        return renamedIndices;
    }

    /**
     * Checks that snapshots can be restored and have compatible version
     *
     * @param snapshotId snapshot id
     * @param snapshot   snapshot metadata
     */
    private void validateSnapshotRestorable(SnapshotId snapshotId, Snapshot snapshot) {
        if (!snapshot.state().restorable()) {
            throw new SnapshotRestoreException(snapshotId, "unsupported snapshot state [" + snapshot.state() + "]");
        }
        // Snapshots from a newer Elasticsearch version than this node cannot be restored.
        if (Version.CURRENT.before(snapshot.version())) {
            throw new SnapshotRestoreException(snapshotId, "the snapshot was created with Elasticsearch version [" + snapshot.version() +
                    "] which is higher than the version of this node [" + Version.CURRENT +
"]"); } } /** * Checks if any of the deleted indices are still recovering and fails recovery on the shards of these indices * * @param event cluster changed event */ private void processDeletedIndices(ClusterChangedEvent event) { MetaData metaData = event.state().metaData(); RestoreMetaData restore = metaData.custom(RestoreMetaData.TYPE); if (restore == null) { // Not restoring - nothing to do return; } if (!event.indicesDeleted().isEmpty()) { // Some indices were deleted, let's make sure all indices that we are restoring still exist for (RestoreMetaData.Entry entry : restore.entries()) { List<ShardId> shardsToFail = null; for (ImmutableMap.Entry<ShardId, ShardRestoreStatus> shard : entry.shards().entrySet()) { if (!shard.getValue().state().completed()) { if (!event.state().metaData().hasIndex(shard.getKey().getIndex())) { if (shardsToFail == null) { shardsToFail = newArrayList(); } shardsToFail.add(shard.getKey()); } } } if (shardsToFail != null) { for (ShardId shardId : shardsToFail) { logger.trace("[{}] failing running shard restore [{}]", entry.snapshotId(), shardId); innerUpdateRestoreState(new UpdateIndexShardRestoreStatusRequest(entry.snapshotId(), shardId, new ShardRestoreStatus(null, RestoreMetaData.State.FAILURE, "index was deleted"))); } } } } } /** * Fails the given snapshot restore operation for the given shard */ public void failRestore(SnapshotId snapshotId, ShardId shardId) { logger.debug("[{}] failed to restore shard [{}]", snapshotId, shardId); UpdateIndexShardRestoreStatusRequest request = new UpdateIndexShardRestoreStatusRequest(snapshotId, shardId, new ShardRestoreStatus(clusterService.state().nodes().localNodeId(), RestoreMetaData.State.FAILURE)); if (clusterService.state().nodes().localNodeMaster()) { innerUpdateRestoreState(request); } else { transportService.sendRequest(clusterService.state().nodes().masterNode(), UPDATE_RESTORE_ACTION_NAME, request, EmptyTransportResponseHandler.INSTANCE_SAME); } } private boolean failed(Snapshot snapshot, 
String index) { for (SnapshotShardFailure failure : snapshot.shardFailures()) { if (index.equals(failure.index())) { return true; } } return false; } /** * Adds restore completion listener * <p/> * This listener is called for each snapshot that finishes restore operation in the cluster. It's responsibility of * the listener to decide if it's called for the appropriate snapshot or not. * * @param listener restore completion listener */ public void addListener(ActionListener<RestoreCompletionResponse> listener) { this.listeners.add(listener); } /** * Removes restore completion listener * <p/> * This listener is called for each snapshot that finishes restore operation in the cluster. * * @param listener restore completion listener */ public void removeListener(ActionListener<RestoreCompletionResponse> listener) { this.listeners.remove(listener); } @Override public void clusterChanged(ClusterChangedEvent event) { try { if (event.localNodeMaster()) { processDeletedIndices(event); } } catch (Throwable t) { logger.warn("Failed to update restore state ", t); } } /** * Checks if a repository is currently in use by one of the snapshots * * @param clusterState cluster state * @param repository repository id * @return true if repository is currently in use by one of the running snapshots */ public static boolean isRepositoryInUse(ClusterState clusterState, String repository) { MetaData metaData = clusterState.metaData(); RestoreMetaData snapshots = metaData.custom(RestoreMetaData.TYPE); if (snapshots != null) { for (RestoreMetaData.Entry snapshot : snapshots.entries()) { if (repository.equals(snapshot.snapshotId().getRepository())) { return true; } } } return false; } /** * Restore snapshot request */ public static class RestoreRequest { final private String cause; final private String name; final private String repository; final private String[] indices; final private String renamePattern; final private String renameReplacement; final private IndicesOptions indicesOptions; 
final private Settings settings; final private TimeValue masterNodeTimeout; final private boolean includeGlobalState; final private boolean partial; final private boolean includeAliases; final private Settings indexSettings; final private String[] ignoreIndexSettings; /** * Constructs new restore request * * @param cause cause for restoring the snapshot * @param repository repository name * @param name snapshot name * @param indices list of indices to restore * @param indicesOptions indices options * @param renamePattern pattern to rename indices * @param renameReplacement replacement for renamed indices * @param settings repository specific restore settings * @param masterNodeTimeout master node timeout * @param includeGlobalState include global state into restore * @param partial allow partial restore * @param indexSettings index settings that should be changed on restore * @param ignoreIndexSettings index settings that shouldn't be restored */ public RestoreRequest(String cause, String repository, String name, String[] indices, IndicesOptions indicesOptions, String renamePattern, String renameReplacement, Settings settings, TimeValue masterNodeTimeout, boolean includeGlobalState, boolean partial, boolean includeAliases, Settings indexSettings, String[] ignoreIndexSettings ) { this.cause = cause; this.name = name; this.repository = repository; this.indices = indices; this.renamePattern = renamePattern; this.renameReplacement = renameReplacement; this.indicesOptions = indicesOptions; this.settings = settings; this.masterNodeTimeout = masterNodeTimeout; this.includeGlobalState = includeGlobalState; this.partial = partial; this.includeAliases = includeAliases; this.indexSettings = indexSettings; this.ignoreIndexSettings = ignoreIndexSettings; } /** * Returns restore operation cause * * @return restore operation cause */ public String cause() { return cause; } /** * Returns snapshot name * * @return snapshot name */ public String name() { return name; } /** * Returns 
repository name * * @return repository name */ public String repository() { return repository; } /** * Return the list of indices to be restored * * @return the list of indices */ public String[] indices() { return indices; } /** * Returns indices option flags * * @return indices options flags */ public IndicesOptions indicesOptions() { return indicesOptions; } /** * Returns rename pattern * * @return rename pattern */ public String renamePattern() { return renamePattern; } /** * Returns replacement pattern * * @return replacement pattern */ public String renameReplacement() { return renameReplacement; } /** * Returns repository-specific restore settings * * @return restore settings */ public Settings settings() { return settings; } /** * Returns true if global state should be restore during this restore operation * * @return restore global state flag */ public boolean includeGlobalState() { return includeGlobalState; } /** * Returns true if incomplete indices will be restored * * @return partial indices restore flag */ public boolean partial() { return partial; } /** * Returns true if aliases should be restore during this restore operation * * @return restore aliases state flag */ public boolean includeAliases() { return includeAliases; } /** * Returns index settings that should be changed on restore * * @return restore aliases state flag */ public Settings indexSettings() { return indexSettings; } /** * Returns index settings that that shouldn't be restored * * @return restore aliases state flag */ public String[] ignoreIndexSettings() { return ignoreIndexSettings; } /** * Return master node timeout * * @return master node timeout */ public TimeValue masterNodeTimeout() { return masterNodeTimeout; } } /** * Internal class that is used to send notifications about finished shard restore operations to master node */ private static class UpdateIndexShardRestoreStatusRequest extends TransportRequest { private SnapshotId snapshotId; private ShardId shardId; private 
ShardRestoreStatus status;

        // No-arg constructor for deserialization via newInstance()
        private UpdateIndexShardRestoreStatusRequest() {
        }

        private UpdateIndexShardRestoreStatusRequest(SnapshotId snapshotId, ShardId shardId, ShardRestoreStatus status) {
            this.snapshotId = snapshotId;
            this.shardId = shardId;
            this.status = status;
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            snapshotId = SnapshotId.readSnapshotId(in);
            shardId = ShardId.readShardId(in);
            status = ShardRestoreStatus.readShardRestoreStatus(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            snapshotId.writeTo(out);
            shardId.writeTo(out);
            status.writeTo(out);
        }

        public SnapshotId snapshotId() {
            return snapshotId;
        }

        public ShardId shardId() {
            return shardId;
        }

        public ShardRestoreStatus status() {
            return status;
        }
    }

    /**
     * Internal class that is used to send notifications about finished shard restore operations to master node
     */
    private class UpdateRestoreStateRequestHandler extends BaseTransportRequestHandler<UpdateIndexShardRestoreStatusRequest> {

        @Override
        public UpdateIndexShardRestoreStatusRequest newInstance() {
            return new UpdateIndexShardRestoreStatusRequest();
        }

        @Override
        public void messageReceived(UpdateIndexShardRestoreStatusRequest request, final TransportChannel channel) throws Exception {
            innerUpdateRestoreState(request);
            channel.sendResponse(TransportResponse.Empty.INSTANCE);
        }

        @Override
        public String executor() {
            // Status updates are cheap; no need to fork to another thread pool
            return ThreadPool.Names.SAME;
        }
    }
}
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.urqa.externallibrary.okhttp; import com.urqa.externallibrary.okhttp.internal.DiskLruCache; import com.urqa.externallibrary.okhttp.internal.InternalCache; import com.urqa.externallibrary.okhttp.internal.Util; import com.urqa.externallibrary.okhttp.internal.http.CacheRequest; import com.urqa.externallibrary.okhttp.internal.http.CacheStrategy; import com.urqa.externallibrary.okhttp.internal.http.HttpMethod; import com.urqa.externallibrary.okhttp.internal.http.OkHeaders; import com.urqa.externallibrary.okhttp.internal.http.StatusLine; import com.urqa.externallibrary.okhttp.internal.io.FileSystem; import java.io.File; import java.io.IOException; import java.security.cert.Certificate; import java.security.cert.CertificateEncodingException; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.NoSuchElementException; import com.urqa.externallibrary.okio.Buffer; import com.urqa.externallibrary.okio.BufferedSink; import com.urqa.externallibrary.okio.BufferedSource; import com.urqa.externallibrary.okio.ByteString; import com.urqa.externallibrary.okio.ForwardingSink; import com.urqa.externallibrary.okio.ForwardingSource; import com.urqa.externallibrary.okio.Okio; import com.urqa.externallibrary.okio.Sink; import 
com.urqa.externallibrary.okio.Source; /** * Caches HTTP and HTTPS responses to the filesystem so they may be reused, saving time and * bandwidth. * * <h3>Cache Optimization</h3> * To measure cache effectiveness, this class tracks three statistics: * <ul> * <li><strong>{@linkplain #getRequestCount() Request Count:}</strong> the number of HTTP * requests issued since this cache was created. * <li><strong>{@linkplain #getNetworkCount() Network Count:}</strong> the number of those * requests that required network use. * <li><strong>{@linkplain #getHitCount() Hit Count:}</strong> the number of those requests whose * responses were served by the cache. * </ul> * * Sometimes a request will result in a conditional cache hit. If the cache contains a stale copy of * the response, the client will issue a conditional {@code GET}. The server will then send either * the updated response if it has changed, or a short 'not modified' response if the client's copy * is still valid. Such responses increment both the network count and hit count. * * <p>The best way to improve the cache hit rate is by configuring the web server to return * cacheable responses. Although this client honors all <a * href="http://tools.ietf.org/html/rfc7234">HTTP/1.1 (RFC 7234)</a> cache headers, it doesn't cache * partial responses. * * <h3>Force a Network Response</h3> * In some situations, such as after a user clicks a 'refresh' button, it may be necessary to skip * the cache, and fetch data directly from the server. 
To force a full refresh, add the {@code * no-cache} directive: <pre> {@code * * Request request = new Request.Builder() * .cacheControl(new CacheControl.Builder().noCache().build()) * .url("http://publicobject.com/helloworld.txt") * .build(); * }</pre> * * If it is only necessary to force a cached response to be validated by the server, use the more * efficient {@code max-age=0} directive instead: <pre> {@code * * Request request = new Request.Builder() * .cacheControl(new CacheControl.Builder() * .maxAge(0, TimeUnit.SECONDS) * .build()) * .url("http://publicobject.com/helloworld.txt") * .build(); * }</pre> * * <h3>Force a Cache Response</h3> * Sometimes you'll want to show resources if they are available immediately, but not otherwise. * This can be used so your application can show <i>something</i> while waiting for the latest data * to be downloaded. To restrict a request to locally-cached resources, add the {@code * only-if-cached} directive: <pre> {@code * * Request request = new Request.Builder() * .cacheControl(new CacheControl.Builder() * .onlyIfCached() * .build()) * .url("http://publicobject.com/helloworld.txt") * .build(); * Response forceCacheResponse = client.newCall(request).execute(); * if (forceCacheResponse.code() != 504) { * // The resource was cached! Show it. * } else { * // The resource was not cached. * } * }</pre> * This technique works even better in situations where a stale response is better than no response. * To permit stale cached responses, use the {@code max-stale} directive with the maximum staleness * in seconds: <pre> {@code * * Request request = new Request.Builder() * .cacheControl(new CacheControl.Builder() * .maxStale(365, TimeUnit.DAYS) * .build()) * .url("http://publicobject.com/helloworld.txt") * .build(); * }</pre> * * <p>The {@link CacheControl} class can configure request caching directives and parse response * caching directives. 
It even offers convenient constants {@link CacheControl#FORCE_NETWORK} and * {@link CacheControl#FORCE_CACHE} that address the use cases above. */ public final class Cache { private static final int VERSION = 201105; private static final int ENTRY_METADATA = 0; private static final int ENTRY_BODY = 1; private static final int ENTRY_COUNT = 2; final InternalCache internalCache = new InternalCache() { @Override public Response get(Request request) throws IOException { return Cache.this.get(request); } @Override public CacheRequest put(Response response) throws IOException { return Cache.this.put(response); } @Override public void remove(Request request) throws IOException { Cache.this.remove(request); } @Override public void update(Response cached, Response network) throws IOException { Cache.this.update(cached, network); } @Override public void trackConditionalCacheHit() { Cache.this.trackConditionalCacheHit(); } @Override public void trackResponse(CacheStrategy cacheStrategy) { Cache.this.trackResponse(cacheStrategy); } }; private final DiskLruCache cache; /* read and write statistics, all guarded by 'this' */ private int writeSuccessCount; private int writeAbortCount; private int networkCount; private int hitCount; private int requestCount; public Cache(File directory, long maxSize) { this(directory, maxSize, FileSystem.SYSTEM); } Cache(File directory, long maxSize, FileSystem fileSystem) { this.cache = DiskLruCache.create(fileSystem, directory, VERSION, ENTRY_COUNT, maxSize); } private static String urlToKey(Request request) { return Util.md5Hex(request.urlString()); } Response get(Request request) { String key = urlToKey(request); DiskLruCache.Snapshot snapshot; Entry entry; try { snapshot = cache.get(key); if (snapshot == null) { return null; } } catch (IOException e) { // Give up because the cache cannot be read. 
return null; } try { entry = new Entry(snapshot.getSource(ENTRY_METADATA)); } catch (IOException e) { Util.closeQuietly(snapshot); return null; } Response response = entry.response(request, snapshot); if (!entry.matches(request, response)) { Util.closeQuietly(response.body()); return null; } return response; } private CacheRequest put(Response response) throws IOException { String requestMethod = response.request().method(); if (HttpMethod.invalidatesCache(response.request().method())) { try { remove(response.request()); } catch (IOException ignored) { // The cache cannot be written. } return null; } if (!requestMethod.equals("GET")) { // Don't cache non-GET responses. We're technically allowed to cache // HEAD requests and some POST requests, but the complexity of doing // so is high and the benefit is low. return null; } if (OkHeaders.hasVaryAll(response)) { return null; } Entry entry = new Entry(response); DiskLruCache.Editor editor = null; try { editor = cache.edit(urlToKey(response.request())); if (editor == null) { return null; } entry.writeTo(editor); return new CacheRequestImpl(editor); } catch (IOException e) { abortQuietly(editor); return null; } } private void remove(Request request) throws IOException { cache.remove(urlToKey(request)); } private void update(Response cached, Response network) { Entry entry = new Entry(network); DiskLruCache.Snapshot snapshot = ((CacheResponseBody) cached.body()).snapshot; DiskLruCache.Editor editor = null; try { editor = snapshot.edit(); // Returns null if snapshot is not current. if (editor != null) { entry.writeTo(editor); editor.commit(); } } catch (IOException e) { abortQuietly(editor); } } private void abortQuietly(DiskLruCache.Editor editor) { // Give up because the cache cannot be written. try { if (editor != null) { editor.abort(); } } catch (IOException ignored) { } } /** * Initialize the cache. 
This reads the journal files from storage and builds up the
   * necessary in-memory cache information.
   * <p>
   * The initialization time may vary depending on the journal file size and
   * the current actual cache size. The application should be aware of calling
   * this function during the initialization phase, and preferably call it from a
   * background worker thread.
   * <p>
   * Note that if the application chooses not to call this method, OkHttp
   * will lazily initialize the cache on its first use.
   */
  public void initialize() throws IOException {
    cache.initialize();
  }

  /**
   * Closes the cache and deletes all of its stored values. This will delete
   * all files in the cache directory including files that weren't created by
   * the cache.
   */
  public void delete() throws IOException {
    cache.delete();
  }

  /**
   * Deletes all values stored in the cache. In-flight writes to the cache will
   * complete normally, but the corresponding responses will not be stored.
   */
  public void evictAll() throws IOException {
    cache.evictAll();
  }

  /**
   * Returns an iterator over the URLs in this cache. This iterator doesn't throw {@code
   * ConcurrentModificationException}, but if new responses are added while iterating, their URLs
   * will not be returned. If existing responses are evicted during iteration, they will be absent
   * (unless they were already returned).
   *
   * <p>The iterator supports {@linkplain Iterator#remove}. Removing a URL from the iterator evicts
   * the corresponding response from the cache. Use this to evict selected responses.
   */
  public Iterator<String> urls() throws IOException {
    return new Iterator<String>() {
      final Iterator<DiskLruCache.Snapshot> delegate = cache.snapshots();

      String nextUrl;
      boolean canRemove;

      @Override public boolean hasNext() {
        if (nextUrl != null) return true;

        canRemove = false; // Prevent delegate.remove() on the wrong item!
        while (delegate.hasNext()) {
          DiskLruCache.Snapshot snapshot = delegate.next();
          try {
            BufferedSource metadata = Okio.buffer(snapshot.getSource(ENTRY_METADATA));
            nextUrl = metadata.readUtf8LineStrict();
            return true;
          } catch (IOException ignored) {
            // We couldn't read the metadata for this snapshot; possibly because the host filesystem
            // has disappeared! Skip it.
          } finally {
            snapshot.close();
          }
        }
        return false;
      }

      @Override public String next() {
        if (!hasNext()) throw new NoSuchElementException();
        String result = nextUrl;
        nextUrl = null;
        canRemove = true;
        return result;
      }

      @Override public void remove() {
        if (!canRemove) throw new IllegalStateException("remove() before next()");
        delegate.remove();
      }
    };
  }

  public synchronized int getWriteAbortCount() {
    return writeAbortCount;
  }

  public synchronized int getWriteSuccessCount() {
    return writeSuccessCount;
  }

  public long getSize() throws IOException {
    return cache.size();
  }

  public long getMaxSize() {
    return cache.getMaxSize();
  }

  public void flush() throws IOException {
    cache.flush();
  }

  public void close() throws IOException {
    cache.close();
  }

  public File getDirectory() {
    return cache.getDirectory();
  }

  public boolean isClosed() {
    return cache.isClosed();
  }

  private synchronized void trackResponse(CacheStrategy cacheStrategy) {
    requestCount++;

    if (cacheStrategy.networkRequest != null) {
      // If this is a conditional request, we'll increment hitCount if/when it hits.
      networkCount++;
    } else if (cacheStrategy.cacheResponse != null) {
      // This response uses the cache and not the network. That's a cache hit.
hitCount++; } } private synchronized void trackConditionalCacheHit() { hitCount++; } public synchronized int getNetworkCount() { return networkCount; } public synchronized int getHitCount() { return hitCount; } public synchronized int getRequestCount() { return requestCount; } private final class CacheRequestImpl implements CacheRequest { private final DiskLruCache.Editor editor; private Sink cacheOut; private boolean done; private Sink body; public CacheRequestImpl(final DiskLruCache.Editor editor) throws IOException { this.editor = editor; this.cacheOut = editor.newSink(ENTRY_BODY); this.body = new ForwardingSink(cacheOut) { @Override public void close() throws IOException { synchronized (Cache.this) { if (done) { return; } done = true; writeSuccessCount++; } super.close(); editor.commit(); } }; } @Override public void abort() { synchronized (Cache.this) { if (done) { return; } done = true; writeAbortCount++; } Util.closeQuietly(cacheOut); try { editor.abort(); } catch (IOException ignored) { } } @Override public Sink body() { return body; } } private static final class Entry { private final String url; private final Headers varyHeaders; private final String requestMethod; private final Protocol protocol; private final int code; private final String message; private final Headers responseHeaders; private final Handshake handshake; /** * Reads an entry from an input stream. 
A typical entry looks like this: * <pre>{@code * http://google.com/foo * GET * 2 * Accept-Language: fr-CA * Accept-Charset: UTF-8 * HTTP/1.1 200 OK * 3 * Content-Type: image/png * Content-Length: 100 * Cache-Control: max-age=600 * }</pre> * * <p>A typical HTTPS file looks like this: * <pre>{@code * https://google.com/foo * GET * 2 * Accept-Language: fr-CA * Accept-Charset: UTF-8 * HTTP/1.1 200 OK * 3 * Content-Type: image/png * Content-Length: 100 * Cache-Control: max-age=600 * * AES_256_WITH_MD5 * 2 * base64-encoded peerCertificate[0] * base64-encoded peerCertificate[1] * -1 * }</pre> * The file is newline separated. The first two lines are the URL and * the request method. Next is the number of HTTP Vary request header * lines, followed by those lines. * * <p>Next is the response status line, followed by the number of HTTP * response header lines, followed by those lines. * * <p>HTTPS responses also contain SSL session information. This begins * with a blank line, and then a line containing the cipher suite. Next * is the length of the peer certificate chain. These certificates are * base64-encoded and appear each on their own line. The next line * contains the length of the local certificate chain. These * certificates are also base64-encoded and appear each on their own * line. A length of -1 is used to encode a null array. 
*/ public Entry(Source in) throws IOException { try { BufferedSource source = Okio.buffer(in); url = source.readUtf8LineStrict(); requestMethod = source.readUtf8LineStrict(); Headers.Builder varyHeadersBuilder = new Headers.Builder(); int varyRequestHeaderLineCount = readInt(source); for (int i = 0; i < varyRequestHeaderLineCount; i++) { varyHeadersBuilder.addLenient(source.readUtf8LineStrict()); } varyHeaders = varyHeadersBuilder.build(); StatusLine statusLine = StatusLine.parse(source.readUtf8LineStrict()); protocol = statusLine.protocol; code = statusLine.code; message = statusLine.message; Headers.Builder responseHeadersBuilder = new Headers.Builder(); int responseHeaderLineCount = readInt(source); for (int i = 0; i < responseHeaderLineCount; i++) { responseHeadersBuilder.addLenient(source.readUtf8LineStrict()); } responseHeaders = responseHeadersBuilder.build(); if (isHttps()) { String blank = source.readUtf8LineStrict(); if (blank.length() > 0) { throw new IOException("expected \"\" but was \"" + blank + "\""); } String cipherSuite = source.readUtf8LineStrict(); List<Certificate> peerCertificates = readCertificateList(source); List<Certificate> localCertificates = readCertificateList(source); handshake = Handshake.get(cipherSuite, peerCertificates, localCertificates); } else { handshake = null; } } finally { in.close(); } } public Entry(Response response) { this.url = response.request().urlString(); this.varyHeaders = OkHeaders.varyHeaders(response); this.requestMethod = response.request().method(); this.protocol = response.protocol(); this.code = response.code(); this.message = response.message(); this.responseHeaders = response.headers(); this.handshake = response.handshake(); } public void writeTo(DiskLruCache.Editor editor) throws IOException { BufferedSink sink = Okio.buffer(editor.newSink(ENTRY_METADATA)); sink.writeUtf8(url); sink.writeByte('\n'); sink.writeUtf8(requestMethod); sink.writeByte('\n'); sink.writeDecimalLong(varyHeaders.size()); 
sink.writeByte('\n'); for (int i = 0, size = varyHeaders.size(); i < size; i++) { sink.writeUtf8(varyHeaders.name(i)); sink.writeUtf8(": "); sink.writeUtf8(varyHeaders.value(i)); sink.writeByte('\n'); } sink.writeUtf8(new StatusLine(protocol, code, message).toString()); sink.writeByte('\n'); sink.writeDecimalLong(responseHeaders.size()); sink.writeByte('\n'); for (int i = 0, size = responseHeaders.size(); i < size; i++) { sink.writeUtf8(responseHeaders.name(i)); sink.writeUtf8(": "); sink.writeUtf8(responseHeaders.value(i)); sink.writeByte('\n'); } if (isHttps()) { sink.writeByte('\n'); sink.writeUtf8(handshake.cipherSuite()); sink.writeByte('\n'); writeCertList(sink, handshake.peerCertificates()); writeCertList(sink, handshake.localCertificates()); } sink.close(); } private boolean isHttps() { return url.startsWith("https://"); } private List<Certificate> readCertificateList(BufferedSource source) throws IOException { int length = readInt(source); if (length == -1) return Collections.emptyList(); // OkHttp v1.2 used -1 to indicate null. 
try {
        CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
        List<Certificate> result = new ArrayList<>(length);
        for (int i = 0; i < length; i++) {
          String line = source.readUtf8LineStrict();
          Buffer bytes = new Buffer();
          bytes.write(ByteString.decodeBase64(line));
          result.add(certificateFactory.generateCertificate(bytes.inputStream()));
        }
        return result;
      } catch (CertificateException e) {
        // Preserve the underlying cause instead of flattening it to a message string.
        throw new IOException(e.getMessage(), e);
      }
    }

    /** Writes {@code certificates} as a count line followed by one base64 certificate per line. */
    private void writeCertList(BufferedSink sink, List<Certificate> certificates)
        throws IOException {
      try {
        sink.writeDecimalLong(certificates.size());
        sink.writeByte('\n');
        for (int i = 0, size = certificates.size(); i < size; i++) {
          byte[] bytes = certificates.get(i).getEncoded();
          String line = ByteString.of(bytes).base64();
          sink.writeUtf8(line);
          sink.writeByte('\n');
        }
      } catch (CertificateEncodingException e) {
        // Preserve the underlying cause instead of flattening it to a message string.
        throw new IOException(e.getMessage(), e);
      }
    }

    /** Returns true if this entry was stored for {@code request}, including its Vary headers. */
    public boolean matches(Request request, Response response) {
      return url.equals(request.urlString())
          && requestMethod.equals(request.method())
          && OkHeaders.varyMatches(response, varyHeaders, request);
    }

    /** Rebuilds a {@link Response} whose body streams lazily from {@code snapshot}. */
    public Response response(Request request, DiskLruCache.Snapshot snapshot) {
      String contentType = responseHeaders.get("Content-Type");
      String contentLength = responseHeaders.get("Content-Length");
      Request cacheRequest = new Request.Builder()
          .url(url)
          .method(requestMethod, null)
          .headers(varyHeaders)
          .build();
      return new Response.Builder()
          .request(cacheRequest)
          .protocol(protocol)
          .code(code)
          .message(message)
          .headers(responseHeaders)
          .body(new CacheResponseBody(snapshot, contentType, contentLength))
          .handshake(handshake)
          .build();
    }
  }

  /**
   * Reads a non-negative decimal int followed by a newline. Throws {@link IOException} when the
   * value is negative, exceeds {@link Integer#MAX_VALUE}, or has trailing garbage on the line.
   */
  private static int readInt(BufferedSource source) throws IOException {
    try {
      long result = source.readDecimalLong();
      String line = source.readUtf8LineStrict();
      if (result < 0 || result > Integer.MAX_VALUE || !line.isEmpty()) {
        throw new IOException("expected an int but was \"" + result + line + "\"");
      }
      return (int) result;
    } catch (NumberFormatException e) {
      // Preserve the underlying cause instead of flattening it to a message string.
      throw new IOException(e.getMessage(), e);
    }
  }

  /** Response body backed by a cache snapshot; closing the body releases the snapshot. */
  private static class CacheResponseBody extends ResponseBody {
    private final DiskLruCache.Snapshot snapshot;
    private final BufferedSource bodySource;
    private final String contentType;
    private final String contentLength;

    public CacheResponseBody(final DiskLruCache.Snapshot snapshot,
        String contentType, String contentLength) {
      this.snapshot = snapshot;
      this.contentType = contentType;
      this.contentLength = contentLength;

      Source source = snapshot.getSource(ENTRY_BODY);
      bodySource = Okio.buffer(new ForwardingSource(source) {
        @Override public void close() throws IOException {
          snapshot.close();
          super.close();
        }
      });
    }

    @Override public MediaType contentType() {
      return contentType != null ? MediaType.parse(contentType) : null;
    }

    @Override public long contentLength() {
      try {
        return contentLength != null ? Long.parseLong(contentLength) : -1;
      } catch (NumberFormatException e) {
        // An unparseable Content-Length header means "unknown length".
        return -1;
      }
    }

    @Override public BufferedSource source() {
      return bodySource;
    }
  }
}
package nam.model.util;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;

import nam.model.EjbTransport;
import nam.model.HttpTransport;
import nam.model.JmsTransport;
import nam.model.Project;
import nam.model.RmiTransport;
import nam.model.Role2;
import nam.model.Transport;
import nam.model.TransportType;

import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.aries.Assert;
import org.aries.util.BaseUtil;
import org.aries.util.ObjectUtil;
import org.aries.util.Validator;

/**
 * Static utility methods for {@link Transport} model objects: key/label access,
 * emptiness checks, validation, sorting, cloning, and lookup of a transport of a
 * given {@link TransportType} from a project or role.
 */
public class TransportUtil extends BaseUtil {

    /** Returns the key used to identify and sort a transport (its name). */
    public static Object getKey(Transport transport) {
        return transport.getName();
    }

    /** Returns the human-readable label for a transport (its name). */
    public static String getLabel(Transport transport) {
        return transport.getName();
    }

    /**
     * Returns true when the transport is null or carries no name.
     */
    public static boolean isEmpty(Transport transport) {
        if (transport == null)
            return true;
        return StringUtils.isEmpty(transport.getName());
    }

    /**
     * Returns true when the collection is null, empty, or contains only
     * empty transports.
     */
    public static boolean isEmpty(Collection<Transport> transportList) {
        if (transportList == null || transportList.isEmpty())
            return true;
        for (Transport transport : transportList) {
            if (!isEmpty(transport))
                return false;
        }
        return true;
    }

    /**
     * Renders a transport as text.
     *
     * <p>Fix: the original dereferenced {@code transport.toString()} inside the
     * "uninitialized" branch, which is reached when {@code transport == null}
     * (because {@link #isEmpty(Transport)} returns true for null) and therefore
     * threw a {@link NullPointerException}. A null transport now yields the
     * placeholder text instead of crashing.
     */
    public static String toString(Transport transport) {
        if (transport == null)
            return "Transport: [uninitialized]";
        if (isEmpty(transport))
            return "Transport: [uninitialized] " + transport.toString();
        return transport.toString();
    }

    /**
     * Renders a collection of transports as a comma-separated,
     * JavaScript-escaped string; empty input yields "".
     */
    public static String toString(Collection<Transport> transportList) {
        if (isEmpty(transportList))
            return "";
        // StringBuilder: single-threaded local accumulation, no need for StringBuffer.
        StringBuilder buf = new StringBuilder();
        for (Transport transport : transportList) {
            if (buf.length() > 0)
                buf.append(", ");
            buf.append(toString(transport));
        }
        return StringEscapeUtils.escapeJavaScript(buf.toString());
    }

    /** Creates a new transport with defaults applied. */
    public static Transport create() {
        Transport transport = new Transport();
        initialize(transport);
        return transport;
    }

    /** Applies defaults to a transport (transacted defaults to false). */
    public static void initialize(Transport transport) {
        if (transport.getTransacted() == null)
            transport.setTransacted(false);
    }

    /**
     * Validates a single transport (name must be present); a null transport
     * is invalid. Failure messages accumulate on the shared {@link Validator}.
     */
    public static boolean validate(Transport transport) {
        if (transport == null)
            return false;
        Validator validator = Validator.getValidator();
        validator.notEmpty(transport.getName(), "\"Name\" must be specified");
        return validator.isValid();
    }

    /**
     * Validates every transport in the collection, accumulating all failure
     * messages on the shared {@link Validator}. A null collection is treated
     * as trivially valid (fixes the NullPointerException the original threw).
     */
    public static boolean validate(Collection<Transport> transportList) {
        if (transportList == null)
            return true;
        Validator validator = Validator.getValidator();
        for (Transport transport : transportList) {
            // Deliberately validates every record (accumulate) rather than
            // stopping at the first failure, so the user sees all problems.
            validate(transport);
        }
        return validator.isValid();
    }

    /** Sorts the list in place by transport key (name). */
    public static void sortRecords(List<Transport> transportList) {
        Collections.sort(transportList, createTransportComparator());
    }

    /** Returns a new list containing the collection's transports sorted by key. */
    public static Collection<Transport> sortRecords(Collection<Transport> transportCollection) {
        List<Transport> list = new ArrayList<Transport>(transportCollection);
        Collections.sort(list, createTransportComparator());
        return list;
    }

    /**
     * Creates a comparator ordering transports by the string form of their key.
     * NOTE(review): assumes {@link #getKey(Transport)} is non-null for every
     * compared element — a null name would throw here, as in the original.
     */
    public static Comparator<Transport> createTransportComparator() {
        return new Comparator<Transport>() {
            public int compare(Transport transport1, Transport transport2) {
                String text1 = getKey(transport1).toString();
                String text2 = getKey(transport2).toString();
                return text1.compareTo(text2);
            }
        };
    }

    /**
     * Deep-copies a transport (mutable fields cloned via {@link ObjectUtil});
     * returns null for null input.
     */
    public static Transport clone(Transport transport) {
        if (transport == null)
            return null;
        Transport clone = create();
        clone.setName(ObjectUtil.clone(transport.getName()));
        clone.setType(transport.getType());
        clone.setHost(ObjectUtil.clone(transport.getHost()));
        clone.setPort(ObjectUtil.clone(transport.getPort()));
        clone.setScope(ObjectUtil.clone(transport.getScope()));
        clone.setTransferMode(transport.getTransferMode());
        clone.setTransacted(ObjectUtil.clone(transport.getTransacted()));
        clone.setProvider(ObjectUtil.clone(transport.getProvider()));
        return clone;
    }

    /**
     * Maps a concrete transport subclass to its {@link TransportType},
     * or null for an unrecognized (or null) transport.
     */
    public static TransportType getTransportType(Transport transport) {
        if (transport instanceof RmiTransport)
            return TransportType.RMI;
        if (transport instanceof EjbTransport)
            return TransportType.EJB;
        if (transport instanceof HttpTransport)
            return TransportType.HTTP;
        if (transport instanceof JmsTransport)
            return TransportType.JMS;
        return null;
    }

    /** Finds the role's RMI transport, or null if none. */
    public static RmiTransport getRMITransport(Project project, Role2 role) {
        return (RmiTransport) getTransport(project, role.getTransports(), TransportType.RMI);
    }

    /** Finds the role's EJB transport, or null if none. */
    public static EjbTransport getEJBTransport(Project project, Role2 role) {
        return (EjbTransport) getTransport(project, role.getTransports(), TransportType.EJB);
    }

    /** Finds the role's HTTP transport, or null if none. */
    public static HttpTransport getHTTPTransport(Project project, Role2 role) {
        return (HttpTransport) getTransport(project, role.getTransports(), TransportType.HTTP);
    }

    /** Finds the role's JMS transport, or null if none. */
    public static JmsTransport getJMSTransport(Project project, Role2 role) {
        return (JmsTransport) getTransport(project, role.getTransports(), TransportType.JMS);
    }

    /** Finds the role's transport of the given type, or null if none. */
    public static Transport getTransport(Project project, Role2 role, TransportType transportType) {
        return getTransport(project, role.getTransports(), transportType);
    }

    /**
     * Finds the first transport of the given type, resolving by-reference
     * entries through {@link MessagingUtil#getTransportByName}. A dangling
     * reference fails fast via {@link Assert#notNull}. Returns null when the
     * list is null or contains no match.
     */
    public static Transport getTransport(Project project, List<Transport> transports, TransportType transportType) {
        if (transports == null)
            return null;
        for (Transport transport : transports) {
            String transportRef = transport.getRef();
            if (transportRef != null)
                transport = MessagingUtil.getTransportByName(project, transportRef);
            Assert.notNull(transport, "Transport not found: " + transportRef);
            if (transportType == TransportUtil.getTransportType(transport))
                return transport;
        }
        return null;
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.ml.job.persistence;

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ClusterAdminClient;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xcontent.XContentBuilder;
import org.mockito.ArgumentCaptor;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import java.util.List;
import java.util.Map;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Fluent builder that assembles a Mockito-mocked {@link Client} for tests.
 * Each {@code prepareXxx}/{@code get}/{@code bulk} call stubs one client
 * interaction; {@link #build()} hands back the fully wired mock.
 */
public class MockClientBuilder {

    private final Client client = mock(Client.class);
    private final AdminClient adminClient = mock(AdminClient.class);
    private final ClusterAdminClient clusterAdminClient = mock(ClusterAdminClient.class);
    private final IndicesAdminClient indicesAdminClient = mock(IndicesAdminClient.class);

    public MockClientBuilder(String clusterName) {
        // Wire the admin-client hierarchy onto the mocked client.
        when(client.admin()).thenReturn(adminClient);
        when(adminClient.cluster()).thenReturn(clusterAdminClient);
        when(adminClient.indices()).thenReturn(indicesAdminClient);

        Settings settings = Settings.builder().put("cluster.name", clusterName).build();
        when(client.settings()).thenReturn(settings);

        // Thread pool with an empty thread context so callers can stash headers.
        ThreadPool threadPool = mock(ThreadPool.class);
        when(client.threadPool()).thenReturn(threadPool);
        when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
    }

    /** Stubs a cluster-health call that reports yellow status. */
    @SuppressWarnings({ "unchecked" })
    public MockClientBuilder addClusterStatusYellowResponse() {
        PlainActionFuture<ClusterHealthResponse> healthFuture = mock(PlainActionFuture.class);
        ClusterHealthRequestBuilder healthBuilder = mock(ClusterHealthRequestBuilder.class);
        when(clusterAdminClient.prepareHealth()).thenReturn(healthBuilder);
        when(healthBuilder.setWaitForYellowStatus()).thenReturn(healthBuilder);
        when(healthBuilder.execute()).thenReturn(healthFuture);
        when(actionGetOf(healthFuture)).thenReturn(mock(ClusterHealthResponse.class));
        return this;
    }

    // Tiny readability helper so the stubbing above reads as a sentence.
    private static ClusterHealthResponse actionGetOf(PlainActionFuture<ClusterHealthResponse> future) {
        return future.actionGet();
    }

    /** Stubs {@code client.get} to invoke its listener with the given response. */
    @SuppressWarnings("unchecked")
    public MockClientBuilder get(GetResponse response) {
        doAnswer(invocation -> {
            ActionListener<GetResponse> listener = (ActionListener<GetResponse>) invocation.getArguments()[1];
            listener.onResponse(response);
            return null;
        }).when(client).get(any(), any());
        return this;
    }

    /** Stubs index creation for the named index. */
    public MockClientBuilder prepareCreate(String index) {
        CreateIndexRequestBuilder createBuilder = mock(CreateIndexRequestBuilder.class);
        CreateIndexResponse createResponse = mock(CreateIndexResponse.class);
        when(createBuilder.setSettings(any(Settings.Builder.class))).thenReturn(createBuilder);
        when(createBuilder.setMapping(any(XContentBuilder.class))).thenReturn(createBuilder);
        when(createBuilder.get()).thenReturn(createResponse);
        when(indicesAdminClient.prepareCreate(eq(index))).thenReturn(createBuilder);
        return this;
    }

    /**
     * Stubs a paged search on {@code index}, capturing query and post-filter
     * into {@code filter} and returning {@code response}.
     */
    public MockClientBuilder prepareSearch(String index, int from, int size, SearchResponse response, ArgumentCaptor<QueryBuilder> filter) {
        SearchRequestBuilder searchBuilder = mock(SearchRequestBuilder.class);
        when(searchBuilder.addSort(any(SortBuilder.class))).thenReturn(searchBuilder);
        when(searchBuilder.setQuery(filter.capture())).thenReturn(searchBuilder);
        when(searchBuilder.setPostFilter(filter.capture())).thenReturn(searchBuilder);
        when(searchBuilder.setFrom(eq(from))).thenReturn(searchBuilder);
        when(searchBuilder.setSize(eq(size))).thenReturn(searchBuilder);
        when(searchBuilder.setFetchSource(eq(true))).thenReturn(searchBuilder);
        when(searchBuilder.addDocValueField(any(String.class))).thenReturn(searchBuilder);
        when(searchBuilder.addDocValueField(any(String.class), any(String.class))).thenReturn(searchBuilder);
        when(searchBuilder.addSort(any(String.class), any(SortOrder.class))).thenReturn(searchBuilder);
        when(searchBuilder.get()).thenReturn(response);
        when(client.prepareSearch(eq(index))).thenReturn(searchBuilder);
        return this;
    }

    /** Stubs successive {@code prepareSearch} calls to return the given builders in order. */
    public MockClientBuilder prepareSearches(String index, SearchRequestBuilder first, SearchRequestBuilder... searches) {
        when(client.prepareSearch(eq(index))).thenReturn(first, searches);
        return this;
    }

    /**
     * Creates a {@link SearchResponse} with a {@link SearchHit} for each element of {@code docs}
     * @param indexName Index being searched
     * @param docs Returned in the SearchResponse
     * @return this
     */
    public MockClientBuilder prepareSearch(String indexName, List<BytesReference> docs) {
        SearchRequest request = new SearchRequest(indexName);
        when(client.prepareSearch(eq(indexName))).thenReturn(stubSearchBuilder(request));

        SearchHit[] hits = new SearchHit[docs.size()];
        for (int i = 0; i < hits.length; i++) {
            SearchHit hit = new SearchHit(10);
            hit.sourceRef(docs.get(i));
            hits[i] = hit;
        }
        answerSearchWith(request, hits);
        return this;
    }

    /*
     * Mock a search that returns search hits with fields.
     * The number of hits is the size of fields
     */
    public MockClientBuilder prepareSearchFields(String indexName, List<Map<String, DocumentField>> fields) {
        SearchRequest request = new SearchRequest(indexName);
        when(client.prepareSearch(eq(indexName))).thenReturn(stubSearchBuilder(request));

        SearchHit[] hits = new SearchHit[fields.size()];
        for (int i = 0; i < hits.length; i++) {
            hits[i] = new SearchHit(10, null, null, fields.get(i));
        }
        answerSearchWith(request, hits);
        return this;
    }

    /** Builds a pass-through SearchRequestBuilder bound to {@code request}. */
    private SearchRequestBuilder stubSearchBuilder(SearchRequest request) {
        SearchRequestBuilder builder = mock(SearchRequestBuilder.class);
        when(builder.setIndicesOptions(any())).thenReturn(builder);
        when(builder.setQuery(any())).thenReturn(builder);
        when(builder.setSource(any())).thenReturn(builder);
        when(builder.setSize(anyInt())).thenReturn(builder);
        when(builder.request()).thenReturn(request);
        return builder;
    }

    /** Stubs {@code client.search(request, listener)} to deliver the given hits. */
    private void answerSearchWith(SearchRequest request, SearchHit[] hits) {
        SearchResponse response = mock(SearchResponse.class);
        SearchHits searchHits = new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0.0f);
        when(response.getHits()).thenReturn(searchHits);

        doAnswer(invocation -> {
            @SuppressWarnings("unchecked")
            ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocation.getArguments()[1];
            listener.onResponse(response);
            return null;
        }).when(client).search(eq(request), any());
    }

    /** Stubs {@code prepareBulk().execute().actionGet()} to yield {@code response}. */
    @SuppressWarnings("unchecked")
    public MockClientBuilder prepareBulk(BulkResponse response) {
        PlainActionFuture<BulkResponse> bulkFuture = mock(PlainActionFuture.class);
        BulkRequestBuilder bulkBuilder = mock(BulkRequestBuilder.class);
        when(client.prepareBulk()).thenReturn(bulkBuilder);
        when(bulkBuilder.execute()).thenReturn(bulkFuture);
        when(bulkFuture.actionGet()).thenReturn(response);
        return this;
    }

    /** Stubs {@code client.bulk(request).actionGet()} to yield {@code response}. */
    @SuppressWarnings("unchecked")
    public MockClientBuilder bulk(BulkResponse response) {
        ActionFuture<BulkResponse> bulkFuture = mock(ActionFuture.class);
        when(client.bulk(any(BulkRequest.class))).thenReturn(bulkFuture);
        when(bulkFuture.actionGet()).thenReturn(response);
        return this;
    }

    /** Returns the fully stubbed client. */
    public Client build() {
        return client;
    }
}
/*
 * Copyright 2012-2018 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.loader.data;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Semaphore;

/**
 * {@link RandomAccessData} implementation backed by a {@link RandomAccessFile}.
 *
 * <p>Concurrent reads are served from a bounded pool of file handles; a
 * subsection shares the pool of its parent and only adjusts offset/length.
 *
 * @author Phillip Webb
 */
public class RandomAccessDataFile implements RandomAccessData {

    private static final int DEFAULT_CONCURRENT_READS = 4;

    private final File file;

    private final FilePool filePool;

    // Absolute start of this (sub)section within the underlying file.
    private final long offset;

    // Length in bytes of this (sub)section.
    private final long length;

    /**
     * Create a new {@link RandomAccessDataFile} backed by the specified file.
     * @param file the underlying file
     * @throws IllegalArgumentException if the file is null or does not exist
     * @see #RandomAccessDataFile(File, int)
     */
    public RandomAccessDataFile(File file) {
        this(file, DEFAULT_CONCURRENT_READS);
    }

    /**
     * Create a new {@link RandomAccessDataFile} backed by the specified file.
     * @param file the underlying file
     * @param concurrentReads the maximum number of concurrent reads allowed on the
     * underlying file before blocking
     * @throws IllegalArgumentException if the file is null or does not exist
     * @see #RandomAccessDataFile(File)
     */
    public RandomAccessDataFile(File file, int concurrentReads) {
        if (file == null) {
            throw new IllegalArgumentException("File must not be null");
        }
        if (!file.exists()) {
            throw new IllegalArgumentException(
                    String.format("File %s must exist", file.getAbsolutePath()));
        }
        this.file = file;
        this.filePool = new FilePool(file, concurrentReads);
        this.offset = 0L;
        this.length = file.length();
    }

    /**
     * Private constructor used to create a {@link #getSubsection(long, long) subsection}.
     * @param file the underlying file
     * @param pool the underlying pool
     * @param offset the offset of the section
     * @param length the length of the section
     */
    private RandomAccessDataFile(File file, FilePool pool, long offset, long length) {
        this.file = file;
        this.filePool = pool;
        this.offset = offset;
        this.length = length;
    }

    /**
     * Returns the underlying File.
     * @return the underlying file
     */
    public File getFile() {
        return this.file;
    }

    @Override
    public InputStream getInputStream(ResourceAccess access) throws IOException {
        return new DataInputStream(access);
    }

    @Override
    public RandomAccessData getSubsection(long offset, long length) {
        if (offset < 0 || length < 0 || offset + length > this.length) {
            throw new IndexOutOfBoundsException();
        }
        // Subsections share the parent's file pool; only the window changes.
        return new RandomAccessDataFile(this.file, this.filePool, this.offset + offset, length);
    }

    @Override
    public long getSize() {
        return this.length;
    }

    public void close() throws IOException {
        this.filePool.close();
    }

    /**
     * {@link RandomAccessData} input-stream implementation for the
     * {@link RandomAccessDataFile}.
     *
     * <p>For {@link ResourceAccess#ONCE} a dedicated {@link RandomAccessFile}
     * is opened up front and kept for the stream's lifetime; otherwise each
     * read acquires a handle from the shared {@link FilePool} and releases it
     * when done.
     */
    private class DataInputStream extends InputStream {

        // Dedicated handle for ONCE access; null means "borrow from the pool per read".
        private RandomAccessFile file;

        // Current read position relative to the section start.
        // Fix: widened from int to long — sections may exceed 2 GiB (offset and
        // length are long), and an int position overflowed, corrupting the
        // cap()/seek() arithmetic past Integer.MAX_VALUE bytes read.
        private long position;

        DataInputStream(ResourceAccess access) throws IOException {
            if (access == ResourceAccess.ONCE) {
                this.file = new RandomAccessFile(RandomAccessDataFile.this.file, "r");
                this.file.seek(RandomAccessDataFile.this.offset);
            }
        }

        @Override
        public int read() throws IOException {
            return doRead(null, 0, 1);
        }

        @Override
        public int read(byte[] b) throws IOException {
            return read(b, 0, b == null ? 0 : b.length);
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            if (b == null) {
                throw new NullPointerException("Bytes must not be null");
            }
            return doRead(b, off, len);
        }

        /**
         * Perform the actual read.
         * @param b the bytes to read or {@code null} when reading a single byte
         * @param off the offset of the byte array
         * @param len the length of data to read
         * @return the number of bytes read into {@code b} or the actual read byte if
         * {@code b} is {@code null}. Returns -1 when the end of the stream is reached
         * @throws IOException in case of I/O errors
         */
        private int doRead(byte[] b, int off, int len) throws IOException {
            if (len == 0) {
                return 0;
            }
            int cappedLen = cap(len);
            if (cappedLen <= 0) {
                return -1;
            }
            RandomAccessFile file = this.file;
            try {
                if (file == null) {
                    // Pooled access: borrow a handle and position it for this read.
                    file = RandomAccessDataFile.this.filePool.acquire();
                    file.seek(RandomAccessDataFile.this.offset + this.position);
                }
                if (b == null) {
                    int rtn = file.read();
                    moveOn(rtn == -1 ? 0 : 1);
                    return rtn;
                }
                else {
                    return (int) moveOn(file.read(b, off, cappedLen));
                }
            }
            finally {
                // Only release handles we borrowed (this.file == null means pooled mode).
                if (this.file == null && file != null) {
                    RandomAccessDataFile.this.filePool.release(file);
                }
            }
        }

        @Override
        public long skip(long n) throws IOException {
            return (n <= 0 ? 0 : moveOn(cap(n)));
        }

        @Override
        public void close() throws IOException {
            if (this.file != null) {
                this.file.close();
            }
        }

        /**
         * Cap the specified value such that it cannot exceed the number of bytes
         * remaining.
         * @param n the value to cap
         * @return the capped value
         */
        private int cap(long n) {
            return (int) Math.min(RandomAccessDataFile.this.length - this.position, n);
        }

        /**
         * Move the stream position forwards the specified amount.
         * @param amount the amount to move
         * @return the amount moved
         */
        private long moveOn(int amount) {
            this.position += amount;
            return amount;
        }

    }

    /**
     * Manage a pool that can be used to perform concurrent reads on the underlying
     * {@link RandomAccessFile}.
     */
    static class FilePool {

        private final File file;

        private final int size;

        // Permits bound the number of handles simultaneously out on loan.
        private final Semaphore available;

        private final Queue<RandomAccessFile> files;

        FilePool(File file, int size) {
            this.file = file;
            this.size = size;
            this.available = new Semaphore(size);
            this.files = new ConcurrentLinkedQueue<>();
        }

        /** Borrows a read-only handle, blocking if all permits are in use. */
        public RandomAccessFile acquire() throws IOException {
            this.available.acquireUninterruptibly();
            RandomAccessFile file = this.files.poll();
            if (file != null) {
                return file;
            }
            // Pool empty: open a fresh handle (bounded by the semaphore).
            return new RandomAccessFile(this.file, "r");
        }

        /** Returns a handle to the pool and frees its permit. */
        public void release(RandomAccessFile file) {
            this.files.add(file);
            this.available.release();
        }

        /**
         * Closes every pooled handle. Acquiring all permits first guarantees no
         * handle is out on loan while we close.
         */
        public void close() throws IOException {
            this.available.acquireUninterruptibly(this.size);
            try {
                RandomAccessFile pooledFile = this.files.poll();
                while (pooledFile != null) {
                    pooledFile.close();
                    pooledFile = this.files.poll();
                }
            }
            finally {
                this.available.release(this.size);
            }
        }

    }

}
package com.tutsplus.nearbymessages;

import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Spinner;
import android.widget.TextView;

import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.common.api.Status;
import com.google.android.gms.nearby.Nearby;
import com.google.android.gms.nearby.connection.Connections;

import java.util.ArrayList;
import java.util.List;

/**
 * Demo of the Google Nearby Connections API. The device acts either as a
 * host (advertises, accepts connection requests, relays messages to all
 * peers) or as a client (discovers a host, connects, exchanges messages),
 * chosen via a spinner.
 */
public class MainActivity extends AppCompatActivity implements
        GoogleApiClient.ConnectionCallbacks,
        GoogleApiClient.OnConnectionFailedListener,
        Connections.ConnectionRequestListener,
        Connections.MessageListener,
        Connections.EndpointDiscoveryListener,
        View.OnClickListener {

    private GoogleApiClient mGoogleApiClient;

    private Spinner mTypeSpinner;
    private TextView mStatusText;
    private Button mConnectionButton;
    private Button mSendButton;
    private ListView mListView;
    private ViewGroup mSendTextContainer;
    private EditText mSendEditText;
    private ArrayAdapter<String> mMessageAdapter;

    // True while this device is advertising as the host.
    private boolean mIsHost;
    // Client-side flag: set once a connection to a host succeeds.
    private boolean mIsConnected;
    // Endpoint id of the host this client is connected to (null when not connected).
    private String mRemoteHostEndpoint;
    // Host-side list of connected peer endpoint ids.
    private List<String> mRemotePeerEndpoints = new ArrayList<String>();

    private static final long CONNECTION_TIME_OUT = 10000L;

    // Wifi or Ethernet for TVs (made final: the array is a constant lookup table).
    private static final int[] NETWORK_TYPES = { ConnectivityManager.TYPE_WIFI,
            ConnectivityManager.TYPE_ETHERNET };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        initViews();
    }

    @Override
    protected void onStart() {
        super.onStart();
        mGoogleApiClient.connect();
    }

    @Override
    protected void onStop() {
        super.onStop();
        // Stop advertising and drop the API connection when leaving the foreground.
        if (mGoogleApiClient != null && mGoogleApiClient.isConnected()) {
            Nearby.Connections.stopAdvertising(mGoogleApiClient);
            mGoogleApiClient.disconnect();
        }
    }

    /** Binds views, wires listeners, and builds the Nearby API client. */
    private void initViews() {
        mStatusText = (TextView) findViewById(R.id.text_status);
        mConnectionButton = (Button) findViewById(R.id.button_connection);
        mSendButton = (Button) findViewById(R.id.button_send);
        mListView = (ListView) findViewById(R.id.list);
        mSendTextContainer = (ViewGroup) findViewById(R.id.send_text_container);
        mSendEditText = (EditText) findViewById(R.id.edit_text_send);
        mTypeSpinner = (Spinner) findViewById(R.id.spinner_type);

        setupButtons();
        setupConnectionTypeSpinner();
        setupMessageList();

        mGoogleApiClient = new GoogleApiClient.Builder(this)
                .addConnectionCallbacks(this)
                .addOnConnectionFailedListener(this)
                .addApi(Nearby.CONNECTIONS_API)
                .build();
    }

    private void setupButtons() {
        mConnectionButton.setOnClickListener(this);
        mSendButton.setOnClickListener(this);
    }

    /** Populates the host/client spinner from the connection_types resource. */
    private void setupConnectionTypeSpinner() {
        ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(
                this, R.array.connection_types, android.R.layout.simple_spinner_item);
        adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
        mTypeSpinner.setAdapter(adapter);
    }

    private void setupMessageList() {
        mMessageAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1);
        mListView.setAdapter(mMessageAdapter);
    }

    /**
     * Returns true when Wifi or Ethernet is connected (or connecting) —
     * Nearby Connections requires a local network.
     */
    private boolean isConnectedToNetwork() {
        ConnectivityManager connManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        for (int networkType : NETWORK_TYPES) {
            NetworkInfo info = connManager.getNetworkInfo(networkType);
            if (info != null && info.isConnectedOrConnecting()) {
                return true;
            }
        }
        return false;
    }

    /** Tears down the session: host stops advertising, client leaves the host. */
    private void disconnect() {
        if (!isConnectedToNetwork()) return;

        if (mIsHost) {
            sendMessage("Shutting down host");
            Nearby.Connections.stopAdvertising(mGoogleApiClient);
            Nearby.Connections.stopAllEndpoints(mGoogleApiClient);
            mIsHost = false;
            mStatusText.setText("Not connected");
            mRemotePeerEndpoints.clear();
        } else {
            // Client that never connected just stops discovering.
            if (!mIsConnected || TextUtils.isEmpty(mRemoteHostEndpoint)) {
                Nearby.Connections.stopDiscovery(mGoogleApiClient, getString(R.string.service_id));
                return;
            }
            sendMessage("Disconnecting");
            Nearby.Connections.disconnectFromEndpoint(mGoogleApiClient, mRemoteHostEndpoint);
            mRemoteHostEndpoint = null;
            mStatusText.setText("Disconnected");
        }
        mIsConnected = false;
    }

    /** Starts advertising this device as a host. */
    private void advertise() {
        if (!isConnectedToNetwork()) return;

        String name = "Nearby Advertising";
        Nearby.Connections.startAdvertising(mGoogleApiClient, name, null, CONNECTION_TIME_OUT, this)
                .setResultCallback(new ResultCallback<Connections.StartAdvertisingResult>() {
                    @Override
                    public void onResult(Connections.StartAdvertisingResult result) {
                        if (result.getStatus().isSuccess()) {
                            mStatusText.setText("Advertising");
                        }
                    }
                });
    }

    /** Starts discovering hosts for our service id. */
    private void discover() {
        if (!isConnectedToNetwork()) return;

        String serviceId = getString(R.string.service_id);
        // Consistency fix: use the named CONNECTION_TIME_OUT constant (same 10000L value)
        // instead of a duplicated magic number.
        Nearby.Connections.startDiscovery(mGoogleApiClient, serviceId, CONNECTION_TIME_OUT, this)
                .setResultCallback(new ResultCallback<Status>() {
                    @Override
                    public void onResult(Status status) {
                        if (status.isSuccess()) {
                            mStatusText.setText("Discovering");
                        }
                    }
                });
    }

    @Override
    public void onConnected(Bundle bundle) {
    }

    @Override
    public void onConnectionSuspended(int i) {
    }

    /**
     * Host side: accept an incoming connection request and track the peer;
     * clients reject requests (only the host accepts connections).
     */
    @Override
    public void onConnectionRequest(final String remoteEndpointId, final String remoteDeviceId,
            final String remoteEndpointName, byte[] payload) {
        if (mIsHost) {
            Nearby.Connections.acceptConnectionRequest(mGoogleApiClient, remoteEndpointId, payload, this)
                    .setResultCallback(new ResultCallback<Status>() {
                        @Override
                        public void onResult(Status status) {
                            if (status.isSuccess()) {
                                if (!mRemotePeerEndpoints.contains(remoteEndpointId)) {
                                    mRemotePeerEndpoints.add(remoteEndpointId);
                                }
                                getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
                                mMessageAdapter.notifyDataSetChanged();
                                sendMessage(remoteDeviceId + " connected!");
                                mSendTextContainer.setVisibility(View.VISIBLE);
                            }
                        }
                    });
        } else {
            Nearby.Connections.rejectConnectionRequest(mGoogleApiClient, remoteEndpointId);
        }
    }

    /**
     * Hosts broadcast to all peers and echo locally; clients send to the host
     * prefixed with their device id.
     */
    private void sendMessage(String message) {
        if (mIsHost) {
            Nearby.Connections.sendReliableMessage(mGoogleApiClient, mRemotePeerEndpoints, message.getBytes());
            mMessageAdapter.add(message);
            mMessageAdapter.notifyDataSetChanged();
        } else {
            Nearby.Connections.sendReliableMessage(mGoogleApiClient, mRemoteHostEndpoint,
                    (Nearby.Connections.getLocalDeviceId(mGoogleApiClient) + " says: " + message).getBytes());
        }
    }

    /** Client side: request a connection to a discovered host endpoint. */
    @Override
    public void onEndpointFound(String endpointId, String deviceId, final String serviceId, String endpointName) {
        byte[] payload = null;
        Nearby.Connections.sendConnectionRequest(mGoogleApiClient, deviceId, endpointId, payload,
                new Connections.ConnectionResponseCallback() {
                    @Override
                    public void onConnectionResponse(String s, Status status, byte[] bytes) {
                        if (status.isSuccess()) {
                            mStatusText.setText("Connected to: " + s);
                            Nearby.Connections.stopDiscovery(mGoogleApiClient, serviceId);
                            mRemoteHostEndpoint = s;
                            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
                            mSendTextContainer.setVisibility(View.VISIBLE);
                            if (!mIsHost) {
                                mIsConnected = true;
                            }
                        } else {
                            mStatusText.setText("Connection to " + s + " failed");
                            if (!mIsHost) {
                                mIsConnected = false;
                            }
                        }
                    }
                }, this);
    }

    @Override
    public void onEndpointLost(String s) {
        if (!mIsHost) {
            mIsConnected = false;
        }
    }

    /** Appends the message to the list; the host also relays it to all peers. */
    @Override
    public void onMessageReceived(String endpointId, byte[] payload, boolean isReliable) {
        mMessageAdapter.add(new String(payload));
        mMessageAdapter.notifyDataSetChanged();
        if (mIsHost) {
            sendMessage(new String(payload));
        }
    }

    @Override
    public void onDisconnected(String s) {
        if (!mIsHost) {
            mIsConnected = false;
        }
    }

    @Override
    public void onConnectionFailed(ConnectionResult connectionResult) {
        if (!mIsHost) {
            mIsConnected = false;
        }
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.button_connection: {
                if (mIsConnected) {
                    disconnect();
                    mStatusText.setText("Disconnected");
                } else if (getString(R.string.connection_type_host)
                        .equalsIgnoreCase(mTypeSpinner.getSelectedItem().toString())) {
                    mIsHost = true;
                    advertise();
                } else {
                    mIsHost = false;
                    discover();
                }
                break;
            }
            case R.id.button_send: {
                // Fix: the original condition was `!empty && mIsConnected || (peers non-empty)`.
                // Because && binds tighter than ||, a host with connected peers could "send"
                // an EMPTY message. Intended grouping: text present AND (connected as client
                // OR at least one peer as host).
                boolean hasText = !TextUtils.isEmpty(mSendEditText.getText());
                boolean hasRecipient = mIsConnected
                        || (mRemotePeerEndpoints != null && !mRemotePeerEndpoints.isEmpty());
                if (hasText && hasRecipient) {
                    sendMessage(mSendEditText.getText().toString());
                    mSendEditText.setText("");
                }
                break;
            }
        }
    }
}
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.testFramework; import com.intellij.codeInsight.CodeInsightSettings; import com.intellij.diagnostic.PerformanceWatcher; import com.intellij.mock.MockApplication; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.application.impl.ApplicationInfoImpl; import com.intellij.openapi.command.impl.StartMarkAction; import com.intellij.openapi.fileTypes.StdFileTypes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.*; import com.intellij.openapi.util.io.FileSystemUtil; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.*; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.codeStyle.CodeStyleSchemes; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.codeStyle.CodeStyleSettingsManager; import com.intellij.psi.impl.source.PostprocessReformattingAspect; import com.intellij.refactoring.rename.inplace.InplaceRefactoring; import com.intellij.rt.execution.junit.FileComparisonFailure; import com.intellij.testFramework.exceptionCases.AbstractExceptionCase; import com.intellij.util.*; import com.intellij.util.containers.ContainerUtil; import 
com.intellij.util.containers.hash.HashMap; import com.intellij.util.ui.UIUtil; import gnu.trove.THashSet; import junit.framework.AssertionFailedError; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.intellij.lang.annotations.RegExp; import org.jdom.Element; import org.jetbrains.annotations.Contract; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.junit.Assert; import javax.swing.*; import javax.swing.Timer; import java.awt.*; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.security.SecureRandom; import java.util.*; import java.util.List; import java.util.concurrent.DelayQueue; import java.util.concurrent.Delayed; import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; /** * @author peter */ @SuppressWarnings("UseOfSystemOutOrSystemErr") public abstract class UsefulTestCase extends TestCase { public static final boolean IS_UNDER_TEAMCITY = System.getenv("TEAMCITY_VERSION") != null; @Deprecated public static final String IDEA_MARKER_CLASS = "com.intellij.openapi.roots.IdeaModifiableModelsProvider"; public static final String TEMP_DIR_MARKER = "unitTest_"; protected static boolean OVERWRITE_TESTDATA = false; private static final String DEFAULT_SETTINGS_EXTERNALIZED; private static final Random RNG = new SecureRandom(); private static final String ORIGINAL_TEMP_DIR = FileUtil.getTempDirectory(); public static Map<String, Long> TOTAL_SETUP_COST_MILLIS = new HashMap<String, Long>(); public static Map<String, Long> TOTAL_TEARDOWN_COST_MILLIS = new HashMap<String, Long>(); @NotNull protected final Disposable myTestRootDisposable = new Disposable() { @Override public void dispose() { } @Override public String 
toString() { String testName = getTestName(false); return UsefulTestCase.this.getClass() + (StringUtil.isEmpty(testName) ? "" : ".test" + testName); } }; protected static String ourPathToKeep = null; private List<String> myPathsToKeep = new ArrayList<String>(); private CodeStyleSettings myOldCodeStyleSettings; private String myTempDir; protected static final Key<String> CREATION_PLACE = Key.create("CREATION_PLACE"); static { // Radar #5755208: Command line Java applications need a way to launch without a Dock icon. System.setProperty("apple.awt.UIElement", "true"); try { CodeInsightSettings defaultSettings = new CodeInsightSettings(); Element oldS = new Element("temp"); defaultSettings.writeExternal(oldS); DEFAULT_SETTINGS_EXTERNALIZED = JDOMUtil.writeElement(oldS, "\n"); } catch (Exception e) { throw new RuntimeException(e); } } private boolean oldDisposerDebug; protected boolean shouldContainTempFiles() { return true; } @Override protected void setUp() throws Exception { super.setUp(); if (shouldContainTempFiles()) { String testName = FileUtil.sanitizeFileName(getTestName(true)); if (StringUtil.isEmptyOrSpaces(testName)) testName = ""; testName = new File(testName).getName(); // in case the test name contains file separators myTempDir = new File(ORIGINAL_TEMP_DIR, TEMP_DIR_MARKER + testName).getPath(); FileUtil.resetCanonicalTempPathCache(myTempDir); } boolean isPerformanceTest = isPerformanceTest(); ApplicationInfoImpl.setInPerformanceTest(isPerformanceTest); // turn off Disposer debugging for performance tests oldDisposerDebug = Disposer.setDebugMode(Disposer.isDebugMode() && !isPerformanceTest); } @Override protected void tearDown() throws Exception { try { Disposer.dispose(myTestRootDisposable); cleanupSwingDataStructures(); cleanupDeleteOnExitHookList(); } finally { Disposer.setDebugMode(oldDisposerDebug); if (shouldContainTempFiles()) { FileUtil.resetCanonicalTempPathCache(ORIGINAL_TEMP_DIR); if (hasTmpFilesToKeep()) { File[] files = new 
File(myTempDir).listFiles(); if (files != null) { for (File file : files) { if (!shouldKeepTmpFile(file)) { FileUtil.delete(file); } } } } else { FileUtil.delete(new File(myTempDir)); } } } UIUtil.removeLeakingAppleListeners(); super.tearDown(); } protected void addTmpFileToKeep(File file) { myPathsToKeep.add(file.getPath()); } private boolean hasTmpFilesToKeep() { return ourPathToKeep != null && FileUtil.isAncestor(myTempDir, ourPathToKeep, false) || !myPathsToKeep.isEmpty(); } private boolean shouldKeepTmpFile(File file) { String path = file.getPath(); if (FileUtil.pathsEqual(path, ourPathToKeep)) return true; for (String pathToKeep : myPathsToKeep) { if (FileUtil.pathsEqual(path, pathToKeep)) return true; } return false; } private static final Set<String> DELETE_ON_EXIT_HOOK_DOT_FILES; private static final Class DELETE_ON_EXIT_HOOK_CLASS; static { Class<?> aClass; try { aClass = Class.forName("java.io.DeleteOnExitHook"); } catch (Exception e) { throw new RuntimeException(e); } Set<String> files = ReflectionUtil.getStaticFieldValue(aClass, Set.class, "files"); DELETE_ON_EXIT_HOOK_CLASS = aClass; DELETE_ON_EXIT_HOOK_DOT_FILES = files; } public static void cleanupDeleteOnExitHookList() throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException { // try to reduce file set retained by java.io.DeleteOnExitHook List<String> list; synchronized (DELETE_ON_EXIT_HOOK_CLASS) { if (DELETE_ON_EXIT_HOOK_DOT_FILES.isEmpty()) return; list = new ArrayList<String>(DELETE_ON_EXIT_HOOK_DOT_FILES); } for (int i = list.size() - 1; i >= 0; i--) { String path = list.get(i); if (FileSystemUtil.getAttributes(path) == null || new File(path).delete()) { synchronized (DELETE_ON_EXIT_HOOK_CLASS) { DELETE_ON_EXIT_HOOK_DOT_FILES.remove(path); } } } } private static void cleanupSwingDataStructures() throws Exception { Object manager = ReflectionUtil.getDeclaredMethod(Class.forName("javax.swing.KeyboardManager"), "getCurrentManager").invoke(null); Map componentKeyStrokeMap = 
ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "componentKeyStrokeMap"); componentKeyStrokeMap.clear(); Map containerMap = ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "containerMap"); containerMap.clear(); } protected void checkForSettingsDamage(@NotNull List<Throwable> exceptions) { Application app = ApplicationManager.getApplication(); if (isPerformanceTest() || app == null || app instanceof MockApplication) { return; } CodeStyleSettings oldCodeStyleSettings = myOldCodeStyleSettings; if (oldCodeStyleSettings == null) { return; } myOldCodeStyleSettings = null; doCheckForSettingsDamage(oldCodeStyleSettings, getCurrentCodeStyleSettings(), exceptions); } public static void doCheckForSettingsDamage(@NotNull CodeStyleSettings oldCodeStyleSettings, @NotNull CodeStyleSettings currentCodeStyleSettings, @NotNull List<Throwable> exceptions) { final CodeInsightSettings settings = CodeInsightSettings.getInstance(); try { Element newS = new Element("temp"); settings.writeExternal(newS); Assert.assertEquals("Code insight settings damaged", DEFAULT_SETTINGS_EXTERNALIZED, JDOMUtil.writeElement(newS, "\n")); } catch (AssertionError error) { CodeInsightSettings clean = new CodeInsightSettings(); for (Field field : clean.getClass().getFields()) { try { ReflectionUtil.copyFieldValue(clean, settings, field); } catch (Exception ignored) { } } exceptions.add(error); } currentCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA); try { checkSettingsEqual(oldCodeStyleSettings, currentCodeStyleSettings, "Code style settings damaged"); } catch (Throwable e) { exceptions.add(e); } finally { currentCodeStyleSettings.clearCodeStyleSettings(); } try { InplaceRefactoring.checkCleared(); } catch (AssertionError e) { exceptions.add(e); } try { StartMarkAction.checkCleared(); } catch (AssertionError e) { exceptions.add(e); } } protected void storeSettings() { if (!isPerformanceTest() && ApplicationManager.getApplication() != null) { 
myOldCodeStyleSettings = getCurrentCodeStyleSettings().clone(); myOldCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA); } } protected CodeStyleSettings getCurrentCodeStyleSettings() { if (CodeStyleSchemes.getInstance().getCurrentScheme() == null) return new CodeStyleSettings(); return CodeStyleSettingsManager.getInstance().getCurrentSettings(); } @NotNull public final Disposable getTestRootDisposable() { return myTestRootDisposable; } @Override protected void runTest() throws Throwable { final Throwable[] throwables = new Throwable[1]; Runnable runnable = new Runnable() { @Override public void run() { try { UsefulTestCase.super.runTest(); } catch (InvocationTargetException e) { e.fillInStackTrace(); throwables[0] = e.getTargetException(); } catch (IllegalAccessException e) { e.fillInStackTrace(); throwables[0] = e; } catch (Throwable e) { throwables[0] = e; } } }; invokeTestRunnable(runnable); if (throwables[0] != null) { throw throwables[0]; } } protected boolean shouldRunTest() { return PlatformTestUtil.canRunTest(getClass()); } public static void edt(@NotNull Runnable r) { EdtTestUtil.runInEdtAndWait(r); } protected void invokeTestRunnable(@NotNull Runnable runnable) throws Exception { EdtTestUtil.runInEdtAndWait(runnable); } protected void defaultRunBare() throws Throwable { Throwable exception = null; try { long setupStart = System.nanoTime(); setUp(); long setupCost = (System.nanoTime() - setupStart) / 1000000; logPerClassCost(setupCost, TOTAL_SETUP_COST_MILLIS); runTest(); } catch (Throwable running) { exception = running; } finally { try { long teardownStart = System.nanoTime(); tearDown(); long teardownCost = (System.nanoTime() - teardownStart) / 1000000; logPerClassCost(teardownCost, TOTAL_TEARDOWN_COST_MILLIS); } catch (Throwable tearingDown) { if (exception == null) exception = tearingDown; } } if (exception != null) throw exception; } /** * Logs the setup cost grouped by test fixture class (superclass of the current test class). 
* * @param cost setup cost in milliseconds */ private void logPerClassCost(long cost, Map<String, Long> costMap) { Class<?> superclass = getClass().getSuperclass(); Long oldCost = costMap.get(superclass.getName()); long newCost = oldCost == null ? cost : oldCost + cost; costMap.put(superclass.getName(), newCost); } public static void logSetupTeardownCosts() { long totalSetup = 0, totalTeardown = 0; System.out.println("Setup costs"); for (Map.Entry<String, Long> entry : TOTAL_SETUP_COST_MILLIS.entrySet()) { System.out.println(String.format(" %s: %d ms", entry.getKey(), entry.getValue())); totalSetup += entry.getValue(); } System.out.println("Teardown costs"); for (Map.Entry<String, Long> entry : TOTAL_TEARDOWN_COST_MILLIS.entrySet()) { System.out.println(String.format(" %s: %d ms", entry.getKey(), entry.getValue())); totalTeardown += entry.getValue(); } System.out.println(String.format("Total overhead: setup %d ms, teardown %d ms", totalSetup, totalTeardown)); System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalSetupMs' value='%d']", totalSetup)); System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalTeardownMs' value='%d']", totalTeardown)); } @Override public void runBare() throws Throwable { if (!shouldRunTest()) return; if (runInDispatchThread()) { TestRunnerUtil.replaceIdeEventQueueSafely(); EdtTestUtil.runInEdtAndWait(new ThrowableRunnable<Throwable>() { @Override public void run() throws Throwable { defaultRunBare(); } }); } else { defaultRunBare(); } } protected boolean runInDispatchThread() { return true; } @NonNls public static String toString(Iterable<?> collection) { if (!collection.iterator().hasNext()) { return "<empty>"; } final StringBuilder builder = new StringBuilder(); for (final Object o : collection) { if (o instanceof THashSet) { builder.append(new TreeSet<Object>((THashSet)o)); } else { builder.append(o); } builder.append("\n"); } return builder.toString(); } public static <T> 
// NOTE: the opening "public static <T>" of this first signature sits on the previous
// physical line of this collapsed source; the token stream continues here.
void assertOrderedEquals(T[] actual, T... expected) { assertOrderedEquals(Arrays.asList(actual), expected); }

/** Asserts that {@code actual} yields exactly {@code expected}, in order. */
public static <T> void assertOrderedEquals(Iterable<T> actual, T... expected) { assertOrderedEquals(null, actual, expected); }

/** Asserts two byte arrays are equal element-by-element, reporting the first differing index. */
public static void assertOrderedEquals(@NotNull byte[] actual, @NotNull byte[] expected) {
  // FIX: JUnit's assertEquals takes (expected, actual); the arguments were swapped here,
  // which produced a misleading failure message on a length mismatch. The int[] overload
  // below already reports expected-first — this now matches it.
  assertEquals(expected.length, actual.length);
  for (int i = 0; i < actual.length; i++) {
    byte a = actual[i];
    byte e = expected[i];
    assertEquals("not equals at index: "+i, e, a);
  }
}

/** Asserts two int arrays are equal element-by-element, with a full dump on length mismatch. */
public static void assertOrderedEquals(@NotNull int[] actual, @NotNull int[] expected) {
  if (actual.length != expected.length) {
    fail("Expected size: "+expected.length+"; actual: "+actual.length+
         "\nexpected: "+Arrays.toString(expected)+"\nactual : "+Arrays.toString(actual));
  }
  for (int i = 0; i < actual.length; i++) {
    int a = actual[i];
    int e = expected[i];
    assertEquals("not equals at index: "+i, e, a);
  }
}

/** Varargs front-end: null-checks both sides, then delegates to the Collection overload. */
public static <T> void assertOrderedEquals(final String errorMsg, @NotNull Iterable<T> actual, @NotNull T... expected) {
  Assert.assertNotNull(actual);
  Assert.assertNotNull(expected);
  assertOrderedEquals(errorMsg, actual, Arrays.asList(expected));
}

public static <T> void assertOrderedEquals(final Iterable<? extends T> actual, final Collection<? extends T> expected) {
  assertOrderedEquals(null, actual, expected);
}

/**
 * Core ordered comparison. On mismatch it first compares the rendered strings (so the
 * diff is readable); if the strings happen to be equal anyway, it still fails with a
 * warning that toString does not reflect the difference.
 */
public static <T> void assertOrderedEquals(final String errorMsg, final Iterable<? extends T> actual, final Collection<? extends T> expected) {
  ArrayList<T> list = new ArrayList<T>();
  for (T t : actual) {
    list.add(t);
  }
  if (!list.equals(new ArrayList<T>(expected))) {
    String expectedString = toString(expected);
    String actualString = toString(actual);
    Assert.assertEquals(errorMsg, expectedString, actualString);
    Assert.fail("Warning! 'toString' does not reflect the difference.\nExpected: " + expectedString + "\nActual: " + actualString);
  }
}

public static <T> void assertOrderedCollection(T[] collection, @NotNull Consumer<T>...
checkers) { Assert.assertNotNull(collection); assertOrderedCollection(Arrays.asList(collection), checkers); } public static <T> void assertSameElements(T[] collection, T... expected) { assertSameElements(Arrays.asList(collection), expected); } public static <T> void assertSameElements(Collection<? extends T> collection, T... expected) { assertSameElements(collection, Arrays.asList(expected)); } public static <T> void assertSameElements(Collection<? extends T> collection, Collection<T> expected) { assertSameElements(null, collection, expected); } public static <T> void assertSameElements(String message, Collection<? extends T> collection, Collection<T> expected) { assertNotNull(collection); assertNotNull(expected); if (collection.size() != expected.size() || !new HashSet<T>(expected).equals(new HashSet<T>(collection))) { Assert.assertEquals(message, toString(expected, "\n"), toString(collection, "\n")); Assert.assertEquals(message, new HashSet<T>(expected), new HashSet<T>(collection)); } } public static <T> void assertContainsOrdered(Collection<? extends T> collection, T... expected) { assertContainsOrdered(collection, Arrays.asList(expected)); } public static <T> void assertContainsOrdered(Collection<? extends T> collection, Collection<T> expected) { ArrayList<T> copy = new ArrayList<T>(collection); copy.retainAll(expected); assertOrderedEquals(toString(collection), copy, expected); } public static <T> void assertContainsElements(Collection<? extends T> collection, T... expected) { assertContainsElements(collection, Arrays.asList(expected)); } public static <T> void assertContainsElements(Collection<? extends T> collection, Collection<T> expected) { ArrayList<T> copy = new ArrayList<T>(collection); copy.retainAll(expected); assertSameElements(toString(collection), copy, expected); } public static String toString(Object[] collection, String separator) { return toString(Arrays.asList(collection), separator); } public static <T> void assertDoesntContain(Collection<? 
extends T> collection, T... notExpected) { assertDoesntContain(collection, Arrays.asList(notExpected)); } public static <T> void assertDoesntContain(Collection<? extends T> collection, Collection<T> notExpected) { ArrayList<T> expected = new ArrayList<T>(collection); expected.removeAll(notExpected); assertSameElements(collection, expected); } public static String toString(Collection<?> collection, String separator) { List<String> list = ContainerUtil.map2List(collection, new Function<Object, String>() { @Override public String fun(final Object o) { return String.valueOf(o); } }); Collections.sort(list); StringBuilder builder = new StringBuilder(); boolean flag = false; for (final String o : list) { if (flag) { builder.append(separator); } builder.append(o); flag = true; } return builder.toString(); } public static <T> void assertOrderedCollection(Collection<? extends T> collection, Consumer<T>... checkers) { Assert.assertNotNull(collection); if (collection.size() != checkers.length) { Assert.fail(toString(collection)); } int i = 0; for (final T actual : collection) { try { checkers[i].consume(actual); } catch (AssertionFailedError e) { System.out.println(i + ": " + actual); throw e; } i++; } } public static <T> void assertUnorderedCollection(T[] collection, Consumer<T>... checkers) { assertUnorderedCollection(Arrays.asList(collection), checkers); } public static <T> void assertUnorderedCollection(Collection<? extends T> collection, Consumer<T>... 
checkers) { Assert.assertNotNull(collection); if (collection.size() != checkers.length) { Assert.fail(toString(collection)); } Set<Consumer<T>> checkerSet = new HashSet<Consumer<T>>(Arrays.asList(checkers)); int i = 0; Throwable lastError = null; for (final T actual : collection) { boolean flag = true; for (final Consumer<T> condition : checkerSet) { Throwable error = accepts(condition, actual); if (error == null) { checkerSet.remove(condition); flag = false; break; } else { lastError = error; } } if (flag) { lastError.printStackTrace(); Assert.fail("Incorrect element(" + i + "): " + actual); } i++; } } private static <T> Throwable accepts(final Consumer<T> condition, final T actual) { try { condition.consume(actual); return null; } catch (Throwable e) { return e; } } @Contract("null, _ -> fail") public static <T> T assertInstanceOf(Object o, Class<T> aClass) { Assert.assertNotNull("Expected instance of: " + aClass.getName() + " actual: " + null, o); Assert.assertTrue("Expected instance of: " + aClass.getName() + " actual: " + o.getClass().getName(), aClass.isInstance(o)); @SuppressWarnings("unchecked") T t = (T)o; return t; } public static <T> T assertOneElement(Collection<T> collection) { Assert.assertNotNull(collection); Iterator<T> iterator = collection.iterator(); String toString = toString(collection); Assert.assertTrue(toString, iterator.hasNext()); T t = iterator.next(); Assert.assertFalse(toString, iterator.hasNext()); return t; } public static <T> T assertOneElement(T[] ts) { Assert.assertNotNull(ts); Assert.assertEquals(Arrays.asList(ts).toString(), 1, ts.length); return ts[0]; } @Contract("null, _ -> fail") public static <T> void assertOneOf(T value, T... 
values) { boolean found = false; for (T v : values) { if (value == v || value != null && value.equals(v)) { found = true; } } Assert.assertTrue(value + " should be equal to one of " + Arrays.toString(values), found); } public static void printThreadDump() { PerformanceWatcher.dumpThreadsToConsole("Thread dump:"); } public static void assertEmpty(final Object[] array) { assertOrderedEquals(array); } public static void assertNotEmpty(final Collection<?> collection) { if (collection == null) return; assertTrue(!collection.isEmpty()); } public static void assertEmpty(final Collection<?> collection) { assertEmpty(collection.toString(), collection); } public static void assertNullOrEmpty(final Collection<?> collection) { if (collection == null) return; assertEmpty(null, collection); } public static void assertEmpty(final String s) { assertTrue(s, StringUtil.isEmpty(s)); } public static <T> void assertEmpty(final String errorMsg, final Collection<T> collection) { assertOrderedEquals(errorMsg, collection); } public static void assertSize(int expectedSize, final Object[] array) { assertEquals(toString(Arrays.asList(array)), expectedSize, array.length); } public static void assertSize(int expectedSize, final Collection<?> c) { assertEquals(toString(c), expectedSize, c.size()); } protected <T extends Disposable> T disposeOnTearDown(final T disposable) { Disposer.register(myTestRootDisposable, disposable); return disposable; } public static void assertSameLines(String expected, String actual) { String expectedText = StringUtil.convertLineSeparators(expected.trim()); String actualText = StringUtil.convertLineSeparators(actual.trim()); Assert.assertEquals(expectedText, actualText); } public static void assertExists(File file){ assertTrue("File should exist " + file, file.exists()); } public static void assertDoesntExist(File file){ assertFalse("File should not exist " + file, file.exists()); } protected String getTestName(boolean lowercaseFirstLetter) { return 
getTestName(getName(), lowercaseFirstLetter); } public static String getTestName(String name, boolean lowercaseFirstLetter) { return name == null ? "" : PlatformTestUtil.getTestName(name, lowercaseFirstLetter); } /** @deprecated use {@link PlatformTestUtil#lowercaseFirstLetter(String, boolean)} (to be removed in IDEA 17) */ @SuppressWarnings("unused") public static String lowercaseFirstLetter(String name, boolean lowercaseFirstLetter) { return PlatformTestUtil.lowercaseFirstLetter(name, lowercaseFirstLetter); } /** @deprecated use {@link PlatformTestUtil#isAllUppercaseName(String)} (to be removed in IDEA 17) */ @SuppressWarnings("unused") public static boolean isAllUppercaseName(String name) { return PlatformTestUtil.isAllUppercaseName(name); } protected String getTestDirectoryName() { final String testName = getTestName(true); return testName.replaceAll("_.*", ""); } public static void assertSameLinesWithFile(String filePath, String actualText) { assertSameLinesWithFile(filePath, actualText, true); } public static void assertSameLinesWithFile(String filePath, String actualText, boolean trimBeforeComparing) { String fileText; try { if (OVERWRITE_TESTDATA) { VfsTestUtil.overwriteTestData(filePath, actualText); System.out.println("File " + filePath + " created."); } fileText = FileUtil.loadFile(new File(filePath), CharsetToolkit.UTF8_CHARSET); } catch (FileNotFoundException e) { VfsTestUtil.overwriteTestData(filePath, actualText); throw new AssertionFailedError("No output text found. File " + filePath + " created."); } catch (IOException e) { throw new RuntimeException(e); } String expected = StringUtil.convertLineSeparators(trimBeforeComparing ? fileText.trim() : fileText); String actual = StringUtil.convertLineSeparators(trimBeforeComparing ? 
actualText.trim() : actualText); if (!Comparing.equal(expected, actual)) { throw new FileComparisonFailure(null, expected, actual, filePath); } } public static void clearFields(final Object test) throws IllegalAccessException { Class aClass = test.getClass(); while (aClass != null) { clearDeclaredFields(test, aClass); aClass = aClass.getSuperclass(); } } public static void clearDeclaredFields(Object test, Class aClass) throws IllegalAccessException { if (aClass == null) return; for (final Field field : aClass.getDeclaredFields()) { @NonNls final String name = field.getDeclaringClass().getName(); if (!name.startsWith("junit.framework.") && !name.startsWith("com.intellij.testFramework.")) { final int modifiers = field.getModifiers(); if ((modifiers & Modifier.FINAL) == 0 && (modifiers & Modifier.STATIC) == 0 && !field.getType().isPrimitive()) { field.setAccessible(true); field.set(test, null); } } } } @SuppressWarnings("deprecation") protected static void checkSettingsEqual(CodeStyleSettings expected, CodeStyleSettings settings, String message) throws Exception { if (expected == null || settings == null) return; Element oldS = new Element("temp"); expected.writeExternal(oldS); Element newS = new Element("temp"); settings.writeExternal(newS); String newString = JDOMUtil.writeElement(newS, "\n"); String oldString = JDOMUtil.writeElement(oldS, "\n"); Assert.assertEquals(message, oldString, newString); } public boolean isPerformanceTest() { String name = getName(); return name != null && name.contains("Performance") || getClass().getName().contains("Performance"); } public static void doPostponedFormatting(final Project project) { DocumentUtil.writeInRunUndoTransparentAction(new Runnable() { @Override public void run() { PsiDocumentManager.getInstance(project).commitAllDocuments(); PostprocessReformattingAspect.getInstance(project).doPostponedFormatting(); } }); } protected static void checkAllTimersAreDisposed(@NotNull List<Throwable> exceptions) { Field firstTimerF; 
Object timerQueue; Object timer; try { Class<?> TimerQueueC = Class.forName("javax.swing.TimerQueue"); Method sharedInstance = TimerQueueC.getDeclaredMethod("sharedInstance"); sharedInstance.setAccessible(true); firstTimerF = ReflectionUtil.getDeclaredField(TimerQueueC, "firstTimer"); timerQueue = sharedInstance.invoke(null); if (firstTimerF == null) { // jdk 8 DelayQueue delayQueue = ReflectionUtil.getField(TimerQueueC, timerQueue, DelayQueue.class, "queue"); timer = delayQueue.peek(); } else { // ancient jdk firstTimerF.setAccessible(true); timer = firstTimerF.get(timerQueue); } } catch (Throwable e) { exceptions.add(e); return; } if (timer != null) { if (firstTimerF != null) { ReflectionUtil.resetField(timerQueue, firstTimerF); } String text = ""; if (timer instanceof Delayed) { long delay = ((Delayed)timer).getDelay(TimeUnit.MILLISECONDS); text = "(delayed for "+delay+"ms)"; Method getTimer = ReflectionUtil.getDeclaredMethod(timer.getClass(), "getTimer"); getTimer.setAccessible(true); try { timer = getTimer.invoke(timer); } catch (Exception e) { exceptions.add(e); return; } } Timer t = (Timer)timer; text = "Timer (listeners: "+Arrays.asList(t.getActionListeners()) + ") "+text; exceptions.add(new AssertionFailedError("Not disposed Timer: " + text + "; queue:" + timerQueue)); } } /** * Checks that code block throw corresponding exception. * * @param exceptionCase Block annotated with some exception type * @throws Throwable */ protected void assertException(final AbstractExceptionCase exceptionCase) throws Throwable { assertException(exceptionCase, null); } /** * Checks that code block throw corresponding exception with expected error msg. * If expected error message is null it will not be checked. 
* * @param exceptionCase Block annotated with some exception type * @param expectedErrorMsg expected error messge * @throws Throwable */ protected void assertException(final AbstractExceptionCase exceptionCase, @Nullable final String expectedErrorMsg) throws Throwable { assertExceptionOccurred(true, exceptionCase, expectedErrorMsg); } /** * Checks that code block doesn't throw corresponding exception. * * @param exceptionCase Block annotated with some exception type * @throws Throwable */ protected void assertNoException(final AbstractExceptionCase exceptionCase) throws Throwable { assertExceptionOccurred(false, exceptionCase, null); } protected void assertNoThrowable(final Runnable closure) { String throwableName = null; try { closure.run(); } catch (Throwable thr) { throwableName = thr.getClass().getName(); } assertNull(throwableName); } private static void assertExceptionOccurred(boolean shouldOccur, AbstractExceptionCase exceptionCase, String expectedErrorMsg) throws Throwable { boolean wasThrown = false; try { exceptionCase.tryClosure(); } catch (Throwable e) { if (shouldOccur) { wasThrown = true; final String errorMessage = exceptionCase.getAssertionErrorMessage(); assertEquals(errorMessage, exceptionCase.getExpectedExceptionClass(), e.getClass()); if (expectedErrorMsg != null) { assertEquals("Compare error messages", expectedErrorMsg, e.getMessage()); } } else if (exceptionCase.getExpectedExceptionClass().equals(e.getClass())) { wasThrown = true; System.out.println(""); e.printStackTrace(System.out); fail("Exception isn't expected here. 
Exception message: " + e.getMessage()); } else { throw e; } } finally { if (shouldOccur && !wasThrown) { fail(exceptionCase.getAssertionErrorMessage()); } } } protected boolean annotatedWith(@NotNull Class annotationClass) { Class<?> aClass = getClass(); String methodName = "test" + getTestName(false); boolean methodChecked = false; while (aClass != null && aClass != Object.class) { if (aClass.getAnnotation(annotationClass) != null) return true; if (!methodChecked) { Method method = ReflectionUtil.getDeclaredMethod(aClass, methodName); if (method != null) { if (method.getAnnotation(annotationClass) != null) return true; methodChecked = true; } } aClass = aClass.getSuperclass(); } return false; } protected String getHomePath() { return PathManager.getHomePath().replace(File.separatorChar, '/'); } protected static boolean isInHeadlessEnvironment() { return GraphicsEnvironment.isHeadless(); } public static void refreshRecursively(@NotNull VirtualFile file) { VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor() { @Override public boolean visitFile(@NotNull VirtualFile file) { file.getChildren(); return true; } }); file.refresh(false, true); } @NotNull public static Test filteredSuite(@RegExp String regexp, @NotNull Test test) { final Pattern pattern = Pattern.compile(regexp); final TestSuite testSuite = new TestSuite(); new Processor<Test>() { @Override public boolean process(Test test) { if (test instanceof TestSuite) { for (int i = 0, len = ((TestSuite)test).testCount(); i < len; i++) { process(((TestSuite)test).testAt(i)); } } else if (pattern.matcher(test.toString()).find()) { testSuite.addTest(test); } return false; } }.process(test); return testSuite; } @Nullable public static VirtualFile refreshAndFindFile(@NotNull final File file) { return UIUtil.invokeAndWaitIfNeeded(new Computable<VirtualFile>() { @Override public VirtualFile compute() { return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file); } }); } public static <E extends 
Exception> void invokeAndWaitIfNeeded(@NotNull final ThrowableRunnable<E> runnable) throws Exception { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { final Ref<Exception> ref = Ref.create(); SwingUtilities.invokeAndWait(new Runnable() { @Override public void run() { try { runnable.run(); } catch (Exception e) { ref.set(e); } } }); if (!ref.isNull()) throw ref.get(); } } }
package org.apache.solr.rest.schema;
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.commons.io.FileUtils;
import org.apache.solr.util.RestTestBase;
import org.apache.solr.util.RestTestHarness;
import org.junit.After;
import org.junit.Before;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;

import java.io.File;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Tests for the Solr REST bulk schema API ({@code POST /schema}): adding, replacing and
 * deleting fields, dynamic fields, field types and copy-field directives, both singly and
 * in multi-command payloads, against a managed (mutable) schema.
 */
public class TestBulkSchemaAPI extends RestTestBase {

  // Temporary Solr home populated from the test resources for each test run.
  private static File tmpSolrHome;

  /** Copies the test home, enables the mutable managed schema, and starts Jetty + REST harness. */
  @Before
  public void before() throws Exception {
    tmpSolrHome = createTempDir().toFile();
    FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
    System.setProperty("managed.schema.mutable", "true");
    System.setProperty("enable.update.log", "false");
    createJettyAndHarness(tmpSolrHome.getAbsolutePath(), "solrconfig-managed-schema.xml", "schema-rest.xml", "/solr", true, null);
  }

  /** Stops Jetty and closes the REST harness so the next test starts from a clean state. */
  @After
  public void after() throws Exception {
    if (jetty != null) {
      jetty.stop();
      jetty = null;
    }
    client = null;
    if (restTestHarness != null) {
      restTestHarness.close();
    }
    restTestHarness = null;
  }

  /**
   * Sends two bad add-field commands in one payload (unknown field type; missing required
   * 'name') and verifies the response reports one error message per command, in order.
   */
  public void testMultipleAddFieldWithErrors() throws Exception {
    String payload = "{\n" +
        " 'add-field' : {\n" +
        " 'name':'a1',\n" +
        " 'type': 'string1',\n" +
        " 'stored':true,\n" +
        " 'indexed':false\n" +
        " },\n" +
        " 'add-field' : {\n" +
        " 'type': 'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " }\n" +
        " }";

    String response = restTestHarness.post("/schema?wt=json", json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    List l = (List) map.get("errors");
    // First command: 'string1' is not a known field type.
    List errorList = (List) ((Map) l.get(0)).get("errorMessages");
    assertEquals(1, errorList.size());
    assertTrue(((String)errorList.get(0)).contains("No such field type"));
    // Second command: 'name' was omitted.
    errorList = (List) ((Map) l.get(1)).get("errorMessages");
    assertEquals(1, errorList.size());
    assertTrue(((String)errorList.get(0)).contains("is a required field"));
  }

  /**
   * Exercises a single payload that mixes add/replace/delete commands for fields, dynamic
   * fields, field types and copy fields, then verifies every change took effect.
   */
  public void testMultipleCommands() throws Exception{
    RestTestHarness harness = restTestHarness;

    // --- Preconditions: baseline schema state before the bulk update. ---
    Map m = getObj(harness, "wdf_nocase", "fields");
    assertNotNull("'wdf_nocase' field does not exist in the schema", m);

    m = getObj(harness, "wdf_nocase", "fieldTypes");
    assertNotNull("'wdf_nocase' field type does not exist in the schema", m);

    m = getObj(harness, "boolean", "fieldTypes");
    assertNotNull("'boolean' field type does not exist in the schema", m);
    assertNull(m.get("sortMissingFirst"));
    assertTrue((Boolean)m.get("sortMissingLast"));

    m = getObj(harness, "name", "fields");
    assertNotNull("'name' field does not exist in the schema", m);
    assertEquals("nametext", m.get("type"));

    m = getObj(harness, "bind", "fields");
    assertNotNull("'bind' field does not exist in the schema", m);
    assertEquals("boolean", m.get("type"));

    m = getObj(harness, "attr_*", "dynamicFields");
    assertNotNull("'attr_*' dynamic field does not exist in the schema", m);
    assertEquals("text", m.get("type"));

    List l = getSourceCopyFields(harness, "*_i");
    Set s = new HashSet();
    assertEquals(4, l.size());
    s.add(((Map)l.get(0)).get("dest"));
    s.add(((Map)l.get(1)).get("dest"));
    s.add(((Map) l.get(2)).get("dest"));
    s.add(((Map) l.get(3)).get("dest"));
    assertTrue(s.contains("title"));
    assertTrue(s.contains("*_s"));

    // One payload containing every command variety supported by the bulk API.
    String payload = "{\n" +
        " 'add-field' : {\n" +
        " 'name':'a1',\n" +
        " 'type': 'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':false\n" +
        " },\n" +
        " 'add-field' : {\n" +
        " 'name':'a2',\n" +
        " 'type': 'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'add-dynamic-field' : {\n" +
        " 'name' :'*_lol',\n" +
        " 'type':'string',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'add-copy-field' : {\n" +
        " 'source' :'a1',\n" +
        " 'dest':['a2','hello_lol']\n" +
        " },\n" +
        " 'add-field-type' : {\n" +
        " 'name' :'mystr',\n" +
        " 'class' : 'solr.StrField',\n" +
        " 'sortMissingLast':'true'\n" +
        " },\n" +
        " 'add-field-type' : {" +
        " 'name' : 'myNewTxtField',\n" +
        " 'class':'solr.TextField',\n" +
        " 'positionIncrementGap':'100',\n" +
        " 'analyzer' : {\n" +
        " 'charFilters':[\n" +
        " {\n" +
        " 'class':'solr.PatternReplaceCharFilterFactory',\n" +
        " 'replacement':'$1$1',\n" +
        " 'pattern':'([a-zA-Z])\\\\\\\\1+'\n" +
        " }\n" +
        " ],\n" +
        " 'tokenizer':{'class':'solr.WhitespaceTokenizerFactory'},\n" +
        " 'filters':[\n" +
        " {\n" +
        " 'class':'solr.WordDelimiterFilterFactory',\n" +
        " 'preserveOriginal':'0'\n" +
        " },\n" +
        " {\n" +
        " 'class':'solr.StopFilterFactory',\n" +
        " 'words':'stopwords.txt',\n" +
        " 'ignoreCase':'true'\n" +
        " },\n" +
        " {'class':'solr.LowerCaseFilterFactory'},\n" +
        " {'class':'solr.ASCIIFoldingFilterFactory'},\n" +
        " {'class':'solr.KStemFilterFactory'}\n" +
        " ]\n" +
        " }\n" +
        " },\n"+
        " 'add-field' : {\n" +
        " 'name':'a3',\n" +
        " 'type': 'myNewTxtField',\n" +
        " 'stored':true,\n" +
        " 'indexed':true\n" +
        " },\n" +
        " 'delete-field' : {'name':'wdf_nocase'},\n" +
        " 'delete-field-type' : {'name':'wdf_nocase'},\n" +
        " 'delete-dynamic-field' : {'name':'*_tt'},\n" +
        " 'delete-copy-field' : {'source':'a1', 'dest':'a2'},\n" +
        " 'delete-copy-field' : {'source':'*_i', 'dest':['title', '*_s']},\n" +
        " 'replace-field-type' : {\n" +
        " 'name':'boolean',\n" +
        " 'class':'solr.BoolField',\n" +
        " 'sortMissingFirst':true\n" +
        " },\n" +
        " 'replace-field' : {\n" +
        " 'name':'name',\n" +
        " 'type':'string',\n" +
        " 'indexed':true,\n" +
        " 'stored':true\n" +
        " },\n" +
        " 'replace-dynamic-field' : {\n" +
        " 'name':'attr_*',\n" +
        " 'type':'string',\n" +
        " 'indexed':true,\n" +
        " 'stored':true,\n" +
        " 'multiValued':true\n" +
        " }\n" +
        " }\n";

    String response = harness.post("/schema?wt=json", json(payload));
    Map map = (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(response, map.get("errors"));

    // --- Postconditions: every command in the payload must have been applied. ---
    m = getObj(harness, "a1", "fields");
    assertNotNull("field a1 not created", m);
    assertEquals("string", m.get("type"));
    assertEquals(Boolean.TRUE, m.get("stored"));
    assertEquals(Boolean.FALSE, m.get("indexed"));

    m = getObj(harness,"a2", "fields");
    assertNotNull("field a2 not created", m);
    assertEquals("string", m.get("type"));
    assertEquals(Boolean.TRUE, m.get("stored"));
    assertEquals(Boolean.TRUE, m.get("indexed"));

    m = getObj(harness,"*_lol", "dynamicFields");
    assertNotNull("field *_lol not created", m);
    assertEquals("string", m.get("type"));
    assertEquals(Boolean.TRUE, m.get("stored"));
    assertEquals(Boolean.TRUE, m.get("indexed"));

    l = getSourceCopyFields(harness, "a1");
    s = new HashSet();
    // a1 -> a2 was deleted by 'delete-copy-field'; only a1 -> hello_lol remains.
    assertEquals(1, l.size());
    s.add(((Map) l.get(0)).get("dest"));
    assertTrue(s.contains("hello_lol"));

    l = getSourceCopyFields(harness, "*_i");
    s = new HashSet();
    // Two of the four original *_i copy-field destinations were deleted.
    assertEquals(2, l.size());
    s.add(((Map)l.get(0)).get("dest"));
    s.add(((Map) l.get(1)).get("dest"));
    assertFalse(s.contains("title"));
    assertFalse(s.contains("*_s"));

    m = getObj(harness, "mystr", "fieldTypes");
    assertNotNull(m);
    assertEquals("solr.StrField", m.get("class"));
    assertEquals("true", String.valueOf(m.get("sortMissingLast")));

    m = getObj(harness, "myNewTxtField", "fieldTypes");
    assertNotNull(m);

    m = getObj(harness, "a3", "fields");
    assertNotNull("field a3 not created", m);
    assertEquals("myNewTxtField", m.get("type"));

    m = getObj(harness, "wdf_nocase", "fields");
    assertNull("field 'wdf_nocase' not deleted", m);

    m = getObj(harness, "wdf_nocase", "fieldTypes");
    assertNull("field type 'wdf_nocase' not deleted", m);

    m = getObj(harness, "*_tt", "dynamicFields");
    assertNull("dynamic field '*_tt' not deleted", m);

    m = getObj(harness, "boolean", "fieldTypes");
    assertNotNull("'boolean' field type does not exist in the schema", m);
    // replace-field-type swapped sortMissingLast for sortMissingFirst.
    assertNull(m.get("sortMissingLast"));
    assertTrue((Boolean)m.get("sortMissingFirst"));

    m = getObj(harness, "bind", "fields"); // this field will be rebuilt when "boolean" field type is replaced
    assertNotNull("'bind' field does not exist in the schema", m);

    m = getObj(harness, "name", "fields");
    assertNotNull("'name' field does not exist in the schema", m);
    assertEquals("string", m.get("type"));

    m = getObj(harness, "attr_*", "dynamicFields");
    assertNotNull("'attr_*' dynamic field does not exist in the schema", m);
    assertEquals("string", m.get("type"));
  }

  /**
   * Verifies delete/replace semantics: deletes are rejected while copy-field directives (or
   * field-type references) still point at the target, replaces preserve existing copy-field
   * directives, and once the directives are removed the deletes succeed.
   */
  public void testDeleteAndReplace() throws Exception {
    RestTestHarness harness = restTestHarness;

    // --- Preconditions: none of the test names exist yet. ---
    Map map = getObj(harness, "NewField1", "fields");
    assertNull("Field 'NewField1' already exists in the schema", map);

    map = getObj(harness, "NewField2", "fields");
    assertNull("Field 'NewField2' already exists in the schema", map);

    map = getObj(harness, "NewFieldType", "fieldTypes");
    assertNull("'NewFieldType' field type already exists in the schema", map);

    List list = getSourceCopyFields(harness, "NewField1");
    assertEquals("There is already a copy field with source 'NewField1' in the schema", 0, list.size());

    map = getObj(harness, "NewDynamicField1*", "dynamicFields");
    assertNull("Dynamic field 'NewDynamicField1*' already exists in the schema", map);

    map = getObj(harness, "NewDynamicField2*", "dynamicFields");
    assertNull("Dynamic field 'NewDynamicField2*' already exists in the schema", map);

    // Create a field type, fields, dynamic fields, and copy-field directives tying them together.
    String cmds = "{\n" +
        " 'add-field-type': { 'name':'NewFieldType', 'class':'solr.StrField' },\n" +
        " 'add-field': [{ 'name':'NewField1', 'type':'NewFieldType' },\n" +
        " { 'name':'NewField2', 'type':'NewFieldType' },\n" +
        " { 'name':'NewField3', 'type':'NewFieldType' },\n" +
        " { 'name':'NewField4', 'type':'NewFieldType' }],\n" +
        " 'add-dynamic-field': [{ 'name':'NewDynamicField1*', 'type':'NewFieldType' },\n" +
        " { 'name':'NewDynamicField2*', 'type':'NewFieldType' },\n" +
        " { 'name':'NewDynamicField3*', 'type':'NewFieldType' }],\n" +
        " 'add-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A']},\n" +
        " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" +
        " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" +
        " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" +
        " {'source':'NewField4', 'dest':'NewField3' }]\n" +
        "}\n";

    String response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(response, map.get("errors"));

    map = getObj(harness, "NewFieldType", "fieldTypes");
    assertNotNull("'NewFieldType' is not in the schema", map);

    map = getObj(harness, "NewField1", "fields");
    assertNotNull("Field 'NewField1' is not in the schema", map);

    map = getObj(harness, "NewField2", "fields");
    assertNotNull("Field 'NewField2' is not in the schema", map);

    map = getObj(harness, "NewField3", "fields");
    assertNotNull("Field 'NewField3' is not in the schema", map);

    map = getObj(harness, "NewField4", "fields");
    assertNotNull("Field 'NewField4' is not in the schema", map);

    list = getSourceCopyFields(harness, "NewField1");
    Set set = new HashSet();
    for (Object obj : list) {
      set.add(((Map)obj).get("dest"));
    }
    assertEquals(2, list.size());
    assertTrue(set.contains("NewField2"));
    assertTrue(set.contains("NewDynamicField1A"));

    list = getSourceCopyFields(harness, "NewDynamicField1*");
    assertEquals(1, list.size());
    assertEquals("NewField2", ((Map)list.get(0)).get("dest"));

    list = getSourceCopyFields(harness, "NewDynamicField2*");
    assertEquals(1, list.size());
    assertEquals("NewField2", ((Map)list.get(0)).get("dest"));

    list = getSourceCopyFields(harness, "NewDynamicField3*");
    assertEquals(1, list.size());
    assertEquals("NewField3", ((Map)list.get(0)).get("dest"));

    list = getSourceCopyFields(harness, "NewField4");
    assertEquals(1, list.size());
    assertEquals("NewField3", ((Map)list.get(0)).get("dest"));

    // Deleting a field type still in use must fail.
    cmds = "{'delete-field-type' : {'name':'NewFieldType'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    Object errors = map.get("errors");
    assertNotNull(errors);
    assertTrue(errors.toString().contains("Can't delete 'NewFieldType' because it's the field type of "));

    // Deleting a field referenced by copy-field directives must fail.
    cmds = "{'delete-field' : {'name':'NewField1'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNotNull(errors);
    assertTrue(errors.toString().contains
        ("Can't delete field 'NewField1' because it's referred to by at least one copy field directive"));

    cmds = "{'delete-field' : {'name':'NewField2'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNotNull(errors);
    assertTrue(errors.toString().contains
        ("Can't delete field 'NewField2' because it's referred to by at least one copy field directive"));

    cmds = "{'replace-field' : {'name':'NewField1', 'type':'string'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));
    // Make sure the copy field directives with source NewField1 are preserved
    list = getSourceCopyFields(harness, "NewField1");
    set = new HashSet();
    for (Object obj : list) {
      set.add(((Map)obj).get("dest"));
    }
    assertEquals(2, list.size());
    assertTrue(set.contains("NewField2"));
    assertTrue(set.contains("NewDynamicField1A"));

    // Deleting the dynamic field would orphan the NewDynamicField1A copy-field destination.
    cmds = "{'delete-dynamic-field' : {'name':'NewDynamicField1*'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNotNull(errors);
    assertTrue(errors.toString().contains
        ("copyField dest :'NewDynamicField1A' is not an explicit field and doesn't match a dynamicField."));

    cmds = "{'replace-field' : {'name':'NewField2', 'type':'string'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNull(errors);
    // Make sure the copy field directives with destination NewField2 are preserved
    list = getDestCopyFields(harness, "NewField2");
    set = new HashSet();
    for (Object obj : list) {
      set.add(((Map)obj).get("source"));
    }
    assertEquals(3, list.size());
    assertTrue(set.contains("NewField1"));
    assertTrue(set.contains("NewDynamicField1*"));
    assertTrue(set.contains("NewDynamicField2*"));

    cmds = "{'replace-dynamic-field' : {'name':'NewDynamicField2*', 'type':'string'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNull(errors);
    // Make sure the copy field directives with source NewDynamicField2* are preserved
    list = getSourceCopyFields(harness, "NewDynamicField2*");
    assertEquals(1, list.size());
    assertEquals("NewField2", ((Map) list.get(0)).get("dest"));

    cmds = "{'replace-dynamic-field' : {'name':'NewDynamicField1*', 'type':'string'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    errors = map.get("errors");
    assertNull(errors);
    // Make sure the copy field directives with destinations matching NewDynamicField1* are preserved
    list = getDestCopyFields(harness, "NewDynamicField1A");
    assertEquals(1, list.size());
    assertEquals("NewField1", ((Map) list.get(0)).get("source"));

    cmds = "{'replace-field-type': {'name':'NewFieldType', 'class':'solr.BinaryField'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));
    // Make sure the copy field directives with sources and destinations of type NewFieldType are preserved
    list = getDestCopyFields(harness, "NewField3");
    assertEquals(2, list.size());
    set = new HashSet();
    for (Object obj : list) {
      set.add(((Map)obj).get("source"));
    }
    assertTrue(set.contains("NewField4"));
    assertTrue(set.contains("NewDynamicField3*"));

    // Remove every copy-field directive so that the fields and the type can be deleted.
    cmds = "{\n" +
        " 'delete-copy-field': [{'source':'NewField1', 'dest':['NewField2', 'NewDynamicField1A']},\n" +
        " {'source':'NewDynamicField1*', 'dest':'NewField2' },\n" +
        " {'source':'NewDynamicField2*', 'dest':'NewField2' },\n" +
        " {'source':'NewDynamicField3*', 'dest':'NewField3' },\n" +
        " {'source':'NewField4', 'dest':'NewField3' }]\n" +
        "}\n";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));
    list = getSourceCopyFields(harness, "NewField1");
    assertEquals(0, list.size());
    list = getSourceCopyFields(harness, "NewDynamicField1*");
    assertEquals(0, list.size());
    list = getSourceCopyFields(harness, "NewDynamicField2*");
    assertEquals(0, list.size());
    list = getSourceCopyFields(harness, "NewDynamicField3*");
    assertEquals(0, list.size());
    list = getSourceCopyFields(harness, "NewField4");
    assertEquals(0, list.size());

    cmds = "{'delete-field': [{'name':'NewField1'},{'name':'NewField2'},{'name':'NewField3'},{'name':'NewField4'}]}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));

    cmds = "{'delete-dynamic-field': [{'name':'NewDynamicField1*'}," +
        " {'name':'NewDynamicField2*'},\n" +
        " {'name':'NewDynamicField3*'}]\n" +
        "}\n";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));

    cmds = "{'delete-field-type':{'name':'NewFieldType'}}";
    response = harness.post("/schema?wt=json", json(cmds));
    map = (Map)ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
    assertNull(map.get("errors"));
  }

  /**
   * Fetches the schema and returns the entry named {@code fld} from the schema section
   * {@code key} ("fields", "dynamicFields", "fieldTypes", ...), or null if absent.
   */
  public static Map getObj(RestTestHarness restHarness, String fld, String key) throws Exception {
    Map map = getRespMap(restHarness);
    List l = (List) ((Map)map.get("schema")).get(key);
    for (Object o : l) {
      Map m = (Map) o;
      if (fld.equals(m.get("name"))) return m;
    }
    return null;
  }

  /** Returns the full parsed JSON response of GET /schema. */
  public static Map getRespMap(RestTestHarness restHarness) throws Exception {
    return getAsMap("/schema?wt=json", restHarness);
  }

  /** Issues a GET to {@code uri} via the harness and parses the JSON body into a Map. */
  public static Map getAsMap(String uri, RestTestHarness restHarness) throws Exception {
    String response = restHarness.query(uri);
    return (Map) ObjectBuilder.getVal(new JSONParser(new StringReader(response)));
  }

  /** Returns all copy-field directives whose source equals {@code src}. */
  public static List getSourceCopyFields(RestTestHarness harness, String src) throws Exception {
    Map map = getRespMap(harness);
    List l = (List) ((Map)map.get("schema")).get("copyFields");
    List result = new ArrayList();
    for (Object o : l) {
      Map m = (Map) o;
      if (src.equals(m.get("source"))) result.add(m);
    }
    return result;
  }

  /** Returns all copy-field directives whose destination equals {@code dest}. */
  public static List getDestCopyFields(RestTestHarness harness, String dest) throws Exception {
    Map map = getRespMap(harness);
    List l = (List) ((Map)map.get("schema")).get("copyFields");
    List result = new ArrayList();
    for (Object o : l) {
      Map m = (Map) o;
      if (dest.equals(m.get("dest"))) result.add(m);
    }
    return result;
  }
}
/*
 * Copyright (c) 2016, Salesforce.com, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * 3. Neither the name of Salesforce.com nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
package com.salesforce.dva.argus.ws.dto;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.salesforce.dva.argus.entity.Notification;
import com.salesforce.dva.argus.entity.Trigger;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;

/**
 * Transfer object for an alert notification: its notifier, subscriptions,
 * cool-down configuration, owning alert and associated trigger IDs.
 *
 * @author Raj Sarkapally (rsarkapally@salesforce.com)
 */
@SuppressWarnings("serial")
@JsonIgnoreProperties(ignoreUnknown = true)
public class NotificationDto extends EntityDTO {

    //~ Instance fields ******************************************************************************************************************************

    private String name;
    private String notifierName;
    private List<String> subscriptions;
    private List<String> metricsToAnnotate;
    private long cooldownPeriod;
    private long cooldownExpiration;
    private List<BigInteger> triggersIds = new ArrayList<>();
    private BigInteger alertId;

    //~ Static methods *******************************************************************************************************************************

    /**
     * Converts a notification entity into its DTO representation, carrying over
     * the owning alert ID and the IDs of all associated triggers.
     *
     * @param   notification  The notification entity. Cannot be null.
     *
     * @return  The corresponding DTO.
     *
     * @throws  WebApplicationException  If the entity is null.
     */
    public static NotificationDto transformToDto(Notification notification) {
        if (notification == null) {
            throw new WebApplicationException("Null entity object cannot be converted to Dto object.", Status.INTERNAL_SERVER_ERROR);
        }

        NotificationDto dto = createDtoObject(NotificationDto.class, notification);

        dto.setAlertId(notification.getAlert().getId());
        for (Trigger trigger : notification.getTriggers()) {
            dto.addTriggersIds(trigger);
        }
        return dto;
    }

    /**
     * Converts a list of notification entities into a list of DTOs, preserving order.
     *
     * @param   notifications  The notification entities. Cannot be null.
     *
     * @return  The corresponding DTOs.
     *
     * @throws  WebApplicationException  If the list is null.
     */
    public static List<NotificationDto> transformToDto(List<Notification> notifications) {
        if (notifications == null) {
            throw new WebApplicationException("Null entity object cannot be converted to Dto object.", Status.INTERNAL_SERVER_ERROR);
        }

        List<NotificationDto> dtos = new ArrayList<>();

        for (Notification entity : notifications) {
            dtos.add(transformToDto(entity));
        }
        return dtos;
    }

    //~ Instance methods *****************************************************************************************************************************

    /**
     * Returns the notification name.
     *
     * @return  The notification name.
     */
    public String getName() {
        return name;
    }

    /**
     * Sets the notification name.
     *
     * @param  name  The notification name.
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Returns the notifier name.
     *
     * @return  The notifier name.
     */
    public String getNotifierName() {
        return notifierName;
    }

    /**
     * Sets the notifier name.
     *
     * @param  notifierName  The notifier name.
     */
    public void setNotifierName(String notifierName) {
        this.notifierName = notifierName;
    }

    /**
     * Returns the subscriptions.
     *
     * @return  The subscriptions.
     */
    public List<String> getSubscriptions() {
        return subscriptions;
    }

    /**
     * Sets the subscriptions.
     *
     * @param  subscriptions  The subscriptions.
     */
    public void setSubscriptions(List<String> subscriptions) {
        this.subscriptions = subscriptions;
    }

    /**
     * Returns the names of the metrics to annotate.
     *
     * @return  The metric names.
     */
    public List<String> getMetricsToAnnotate() {
        return metricsToAnnotate;
    }

    /**
     * Sets the names of the metrics to annotate.
     *
     * @param  metricsToAnnotate  The metric names.
     */
    public void setMetricsToAnnotate(List<String> metricsToAnnotate) {
        this.metricsToAnnotate = metricsToAnnotate;
    }

    /**
     * Returns the cool down period.
     *
     * @return  The cool down period.
     */
    public long getCooldownPeriod() {
        return cooldownPeriod;
    }

    /**
     * Sets the cool down period.
     *
     * @param  cooldownPeriod  The cool down period.
     */
    public void setCooldownPeriod(long cooldownPeriod) {
        this.cooldownPeriod = cooldownPeriod;
    }

    /**
     * Returns the cool down expiration time in milliseconds.
     *
     * @return  The cool down expiration time.
     */
    public long getCooldownExpiration() {
        return cooldownExpiration;
    }

    /**
     * Sets the cool down expiration time.
     *
     * @param  cooldownExpiration  The cool down expiration time.
     */
    public void setCooldownExpiration(long cooldownExpiration) {
        this.cooldownExpiration = cooldownExpiration;
    }

    /**
     * Returns the IDs of the associated triggers.
     *
     * @return  The trigger IDs.
     */
    public List<BigInteger> getTriggersIds() {
        return triggersIds;
    }

    /**
     * Appends the given trigger's ID to this notification's trigger ID list.
     *
     * @param  trigger  The trigger whose ID to record.
     */
    public void addTriggersIds(Trigger trigger) {
        this.getTriggersIds().add(trigger.getId());
    }

    /**
     * Returns the owning alert ID.
     *
     * @return  The alert ID.
     */
    public BigInteger getAlertId() {
        return alertId;
    }

    /**
     * Sets the owning alert ID.
     *
     * @param  alertId  The alert ID.
     */
    public void setAlertId(BigInteger alertId) {
        this.alertId = alertId;
    }

    /** Builds a fully-populated sample instance for API documentation. */
    @Override
    public Object createExample() {
        NotificationDto example = new NotificationDto();

        example.setId(BigInteger.ONE);
        example.setAlertId(BigInteger.ONE);
        example.setName("sample-notification");
        example.setNotifierName("email");
        example.setSubscriptions(Arrays.asList("joe.smith@salesforce.com"));
        example.setMetricsToAnnotate(Arrays.asList("scope:metric{tagk=tagv}"));
        example.setCooldownPeriod(300000);
        example.setCooldownExpiration(System.currentTimeMillis() + 300000);
        example.setCreatedById(BigInteger.TEN);
        example.setCreatedDate(new Date());
        example.setModifiedById(BigInteger.TEN);
        example.setModifiedDate(new Date());
        return example;
    }
}
/* Copyright (c) 2016, Salesforce.com, Inc.  All rights reserved. */
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.core;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeByteValue;
import static org.elasticsearch.index.mapper.MapperBuilders.byteField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;

/**
 * Field mapper for the "byte" mapping type. Values are indexed as Lucene
 * numeric int terms (a byte fits in an int), so queries and stats below go
 * through the int-based NumericUtils/NumericRangeQuery APIs.
 */
public class ByteFieldMapper extends NumberFieldMapper {

    public static final String CONTENT_TYPE = "byte";

    /** Default (frozen) field type for "byte" fields. */
    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final MappedFieldType FIELD_TYPE = new ByteFieldType();

        static {
            FIELD_TYPE.freeze();
        }
    }

    /** Mapping builder for "byte" fields. */
    public static class Builder extends NumberFieldMapper.Builder<Builder, ByteFieldMapper> {

        public Builder(String name) {
            super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT);
            builder = this;
        }

        @Override
        public ByteFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            ByteFieldMapper fieldMapper = new ByteFieldMapper(fieldType, docValues, ignoreMalformed(context), coerce(context),
                    fieldDataSettings, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
            fieldMapper.includeInAll(includeInAll);
            return fieldMapper;
        }

        @Override
        protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
            // MAX_VALUE means "no precision step"; name the analyzer accordingly.
            String name = precisionStep == Integer.MAX_VALUE ? "_byte/max" : ("_byte/" + precisionStep);
            return new NamedAnalyzer(name, new NumericIntegerAnalyzer(precisionStep));
        }

        @Override
        protected int maxPrecisionStep() {
            // Bytes are encoded as 32-bit ints, hence the int-sized cap.
            return 32;
        }
    }

    /** Parses the "byte" mapping definition, consuming the "null_value" property. */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            ByteFieldMapper.Builder builder = byteField(name);
            parseNumberField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    if (propNode == null) {
                        throw new MapperParsingException("Property [null_value] cannot be null.");
                    }
                    builder.nullValue(nodeByteValue(propNode));
                    iterator.remove(); // consumed; leftover properties are validated elsewhere
                }
            }
            return builder;
        }
    }

    /** Field type implementing byte-specific value conversion, range/fuzzy queries and stats. */
    static final class ByteFieldType extends NumberFieldType {
        public ByteFieldType() {}

        protected ByteFieldType(ByteFieldType ref) {
            super(ref);
        }

        @Override
        public NumberFieldType clone() {
            return new ByteFieldType(this);
        }

        @Override
        public Byte nullValue() {
            return (Byte)super.nullValue();
        }

        @Override
        public Byte value(Object value) {
            if (value == null) {
                return null;
            }
            if (value instanceof Number) {
                return ((Number) value).byteValue();
            }
            if (value instanceof BytesRef) {
                // A stored byte value occupies a single byte at the ref's offset.
                return ((BytesRef) value).bytes[((BytesRef) value).offset];
            }
            return Byte.parseByte(value.toString());
        }

        @Override
        public BytesRef indexedValueForSearch(Object value) {
            BytesRefBuilder bytesRef = new BytesRefBuilder();
            NumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef);  // 0 because of exact match
            return bytesRef.get();
        }

        @Override
        public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
            // Bytes are indexed as ints, so the int range query applies; null bound = open-ended.
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                lowerTerm == null ? null : (int)parseValue(lowerTerm),
                upperTerm == null ? null : (int)parseValue(upperTerm),
                includeLower, includeUpper);
        }

        @Override
        public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
            byte iValue = Byte.parseByte(value);
            byte iSim = fuzziness.asByte();
            // Numeric "fuzziness" is an inclusive +/- range around the value.
            return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
                iValue - iSim,
                iValue + iSim,
                true, true);
        }

        @Override
        public FieldStats stats(Terms terms, int maxDoc) throws IOException {
            long minValue = NumericUtils.getMinInt(terms);
            long maxValue = NumericUtils.getMaxInt(terms);
            return new FieldStats.Long(
                maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
            );
        }
    }

    protected ByteFieldMapper(MappedFieldType fieldType, Boolean docValues,
                              Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,
                              @Nullable Settings fieldDataSettings,
                              Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
        super(fieldType, docValues, ignoreMalformed, coerce, fieldDataSettings, indexSettings, multiFields, copyTo);
    }

    @Override
    public ByteFieldType fieldType() {
        return (ByteFieldType)fieldType;
    }

    @Override
    public MappedFieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("byte");
    }

    /** Coerces a query-time value (Number, BytesRef or String) to a byte. */
    private static byte parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).byteValue();
        }
        if (value instanceof BytesRef) {
            return Byte.parseByte(((BytesRef) value).utf8ToString());
        }
        return Byte.parseByte(value.toString());
    }

    @Override
    protected boolean customBoost() {
        return true;
    }

    // Parses a document value (external value, null, string, or an object with
    // "value"/"boost" keys) into a byte, honoring the configured null_value and
    // feeding _all when include_in_all applies.  NOTE(review): method continues
    // beyond this excerpt.
    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
        byte value;
        float boost = this.fieldType.boost();
        if (context.externalValueSet()) {
            Object externalValue = context.externalValue();
            if (externalValue == null) {
                if (fieldType().nullValue() == null) {
                    return; // no value and no configured null_value: index nothing
                }
                value = fieldType().nullValue();
            } else if (externalValue instanceof String) {
                String sExternalValue = (String) externalValue;
                if (sExternalValue.length() == 0) {
                    if (fieldType().nullValue() == null) {
                        return;
                    }
                    value = fieldType().nullValue();
                } else {
                    value = Byte.parseByte(sExternalValue);
                }
            } else {
                value = ((Number) externalValue).byteValue();
            }
            if (context.includeInAll(includeInAll, this)) {
                context.allEntries().addText(fieldType.names().fullName(), Byte.toString(value), boost);
            }
        } else {
            XContentParser parser = context.parser();
            if (parser.currentToken() == XContentParser.Token.VALUE_NULL ||
                    (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) {
                if (fieldType().nullValue() == null) {
                    return;
                }
                value = fieldType().nullValue();
                if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) {
                    context.allEntries().addText(fieldType.names().fullName(), fieldType().nullValueAsString(), boost);
                }
            } else if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
                // Object form: { "value": ..., "boost": ... }
                XContentParser.Token token;
                String currentFieldName = null;
                Byte objValue = fieldType().nullValue();
                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                    if (token == XContentParser.Token.FIELD_NAME) {
                        currentFieldName = parser.currentName();
                    } else {
                        if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) {
                            if (parser.currentToken() != XContentParser.Token.VALUE_NULL) {
                                objValue = (byte) parser.shortValue(coerce.value());
                            }
                        } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) {
                            boost = parser.floatValue();
                        } else {
                            throw new IllegalArgumentException("unknown property [" + currentFieldName + "]");
                        }
                    }
                }
                if (objValue == null) {
                    // no value
                    return;
                }
                value = objValue;
            } else {
                value = (byte) parser.shortValue(coerce.value());
                if (context.includeInAll(includeInAll, this)) {
context.allEntries().addText(fieldType.names().fullName(), parser.text(), boost); } } } if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { CustomByteNumericField field = new CustomByteNumericField(this, value, fieldType); field.setBoost(boost); fields.add(field); } if (fieldType().hasDocValues()) { addDocValue(context, fields, value); } } @Override protected String contentType() { return CONTENT_TYPE; } @Override public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { super.merge(mergeWith, mergeResult); if (!this.getClass().equals(mergeWith.getClass())) { return; } if (!mergeResult.simulate()) { this.fieldType = this.fieldType.clone(); this.fieldType.setNullValue(((ByteFieldMapper) mergeWith).fieldType().nullValue()); this.fieldType.freeze(); } } @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); if (includeDefaults || fieldType.numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) { builder.field("precision_step", fieldType.numericPrecisionStep()); } if (includeDefaults || fieldType().nullValue() != null) { builder.field("null_value", fieldType().nullValue()); } if (includeInAll != null) { builder.field("include_in_all", includeInAll); } else if (includeDefaults) { builder.field("include_in_all", false); } } public static class CustomByteNumericField extends CustomNumericField { private final byte number; private final NumberFieldMapper mapper; public CustomByteNumericField(NumberFieldMapper mapper, byte number, MappedFieldType fieldType) { super(mapper, number, fieldType); this.mapper = mapper; this.number = number; } @Override public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { if (fieldType().indexOptions() != IndexOptions.NONE) { return mapper.popCachedStream().setIntValue(number); } return null; } @Override public String numericAsString() { 
return Byte.toString(number); } } }
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.util.gui.resource;

import java.net.URL;
import java.util.Iterator;
import java.util.List;
import java.util.MissingResourceException;
import java.util.ResourceBundle;

import javax.swing.AbstractButton;
import javax.swing.Action;
import javax.swing.ButtonGroup;
import javax.swing.ImageIcon;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JComponent;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.JSeparator;
import javax.swing.KeyStroke;

/**
 * This class represents a menu factory which builds
 * menubars and menus from the content of a resource file. <br>
 *
 * The resource entries format is (for a menubar named 'MenuBar'):<br>
 * <pre>
 *   MenuBar           = Menu1 Menu2 ...
 *
 *   Menu1.type        = RADIO | CHECK | MENU | ITEM
 *   Menu1             = Item1 Item2 - Item3 ...
 *   Menu1.text        = text
 *   Menu1.icon        = icon_name
 *   Menu1.mnemonic    = mnemonic
 *   Menu1.accelerator = accelerator
 *   Menu1.action      = action_name
 *   Menu1.selected    = true | false
 *   Menu1.enabled     = true | false
 *   ...
 * mnemonic is a single character
 * accelerator is of the form described in
 * {@link javax.swing.KeyStroke#getKeyStroke(String)}.
 * '-' represents a separator
 * </pre>
 * All entries are optional except the '.type' entry
 * Consecutive RADIO items are put in a ButtonGroup
 *
 * <p>Implementation note: optional entries are implemented by catching
 * {@link MissingResourceException} and falling back; the empty catch blocks
 * throughout this class are therefore deliberate "resource is optional"
 * handling, not swallowed errors.</p>
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id: MenuFactory.java 498290 2007-01-21 11:44:05Z cam $
 */
public class MenuFactory extends ResourceManager {
    // Constants
    //
    // Values of the '.type' entry.
    private static final String TYPE_MENU          = "MENU";
    private static final String TYPE_ITEM          = "ITEM";
    private static final String TYPE_RADIO         = "RADIO";
    private static final String TYPE_CHECK         = "CHECK";
    // Token that produces a JSeparator instead of an item.
    private static final String SEPARATOR          = "-";

    // Resource-key suffixes for the per-item entries.
    private static final String TYPE_SUFFIX        = ".type";
    private static final String TEXT_SUFFIX        = ".text";
    private static final String MNEMONIC_SUFFIX    = ".mnemonic";
    private static final String ACCELERATOR_SUFFIX = ".accelerator";
    private static final String ACTION_SUFFIX      = ".action";
    private static final String SELECTED_SUFFIX    = ".selected";
    private static final String ENABLED_SUFFIX     = ".enabled";
    private static final String ICON_SUFFIX        = ".icon";

    /**
     * The table which contains the actions
     */
    private ActionMap actions;

    /**
     * The current radio group
     */
    // Non-null only while a run of consecutive RADIO items is being built;
    // reset to null by any non-RADIO item or separator.
    private ButtonGroup buttonGroup;

    /**
     * Creates a new menu factory
     * @param rb the resource bundle that contains the menu bar
     *           description.
     * @param am the actions to add to menu items
     */
    public MenuFactory(ResourceBundle rb, ActionMap am) {
        super(rb);
        actions = am;
        buttonGroup = null;
    }

    /**
     * Creates and returns a swing menu bar
     * @param name the name of the menu bar in the resource bundle
     * @throws MissingResourceException if one of the keys that compose the
     *         menu is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character and if the accelerator is malformed
     * @throws MissingListenerException if an item action is not found in the
     *         action map
     */
    public JMenuBar createJMenuBar(String name)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        return createJMenuBar(name, null);
    }

    /**
     * Creates and returns a swing menu bar
     * @param name the name of the menu bar in the resource bundle
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if one of the keys that compose the
     *         menu is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character and if the accelerator is malformed
     * @throws MissingListenerException if an item action is not found in the
     *         action map
     */
    public JMenuBar createJMenuBar(String name, String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        JMenuBar result = new JMenuBar();
        // The menubar entry lists its top-level menu names.
        List menus = getSpecializedStringList(name, specialization);
        Iterator it = menus.iterator();

        while (it.hasNext()) {
            result.add(createJMenuComponent((String)it.next(), specialization));
        }
        return result;
    }

    /**
     * Gets a possibly specialized resource string.
     * This will first look for
     * <code>name + '.' + specialization</code>, and if that resource
     * doesn't exist, <code>name</code>.
     */
    protected String getSpecializedString(String name, String specialization) {
        String s;
        try {
            s = getString(name + '.' + specialization);
        } catch (MissingResourceException mre) {
            // Fall back to the unspecialized key.
            s = getString(name);
        }
        return s;
    }

    /**
     * Gets a possibly specialized resource string list.
     * This will first look for
     * <code>name + '.' + specialization</code>, and if that resource
     * doesn't exist, <code>name</code>.
     */
    protected List getSpecializedStringList(String name,
                                            String specialization) {
        List l;
        try {
            l = getStringList(name + '.' + specialization);
        } catch (MissingResourceException mre) {
            // Fall back to the unspecialized key.
            l = getStringList(name);
        }
        return l;
    }

    /**
     * Gets a possibly specialized resource boolean.
     * This will first look for
     * <code>name + '.' + specialization</code>, and if that resource
     * doesn't exist, <code>name</code>.
     */
    protected boolean getSpecializedBoolean(String name,
                                            String specialization) {
        boolean b;
        try {
            b = getBoolean(name + '.' + specialization);
        } catch (MissingResourceException mre) {
            // Fall back to the unspecialized key.
            b = getBoolean(name);
        }
        return b;
    }

    /**
     * Creates and returns a menu item or a separator
     * @param name the name of the menu item or "-" to create a separator
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if key is not the name of a menu item.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException in case of malformed entry
     * @throws MissingListenerException if an item action is not found in the
     *         action map
     */
    protected JComponent createJMenuComponent(String name,
                                              String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        if (name.equals(SEPARATOR)) {
            // A separator terminates any run of consecutive RADIO items.
            buttonGroup = null;
            return new JSeparator();
        }
        String type = getSpecializedString(name + TYPE_SUFFIX,
                                           specialization);
        JComponent item = null;

        // Maintain the radio group: consecutive RADIO items share one
        // ButtonGroup; any other type breaks the run.
        if (type.equals(TYPE_RADIO)) {
            if (buttonGroup == null) {
                buttonGroup = new ButtonGroup();
            }
        } else {
            buttonGroup = null;
        }

        if (type.equals(TYPE_MENU)) {
            item = createJMenu(name, specialization);
        } else if (type.equals(TYPE_ITEM)) {
            item = createJMenuItem(name, specialization);
        } else if (type.equals(TYPE_RADIO)) {
            item = createJRadioButtonMenuItem(name, specialization);
            buttonGroup.add((AbstractButton)item);
        } else if (type.equals(TYPE_CHECK)) {
            item = createJCheckBoxMenuItem(name, specialization);
        } else {
            throw new ResourceFormatException("Malformed resource",
                                              bundle.getClass().getName(),
                                              name+TYPE_SUFFIX);
        }

        return item;
    }

    /**
     * Creates and returns a new swing menu
     * @param name the name of the menu bar in the resource bundle
     * @throws MissingResourceException if one of the keys that compose the
     *         menu is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if a item action is not found in the
     *         action map.
     */
    public JMenu createJMenu(String name)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        return createJMenu(name, null);
    }

    /**
     * Creates and returns a new swing menu
     * @param name the name of the menu bar in the resource bundle
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if one of the keys that compose the
     *         menu is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if a item action is not found in the
     *         action map.
     */
    public JMenu createJMenu(String name, String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        JMenu result = new JMenu(getSpecializedString(name + TEXT_SUFFIX,
                                                      specialization));
        initializeJMenuItem(result, name, specialization);

        // Recursively build the menu's children from its item list.
        List items = getSpecializedStringList(name, specialization);
        Iterator it = items.iterator();

        while (it.hasNext()) {
            result.add(createJMenuComponent((String)it.next(),
                                            specialization));
        }
        return result;
    }

    /**
     * Creates and returns a new swing menu item
     * @param name the name of the menu item
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JMenuItem createJMenuItem(String name)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        return createJMenuItem(name, null);
    }

    /**
     * Creates and returns a new swing menu item
     * @param name the name of the menu item
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JMenuItem createJMenuItem(String name, String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        JMenuItem result = new JMenuItem(getSpecializedString(name + TEXT_SUFFIX,
                                                              specialization));
        initializeJMenuItem(result, name, specialization);
        return result;
    }

    /**
     * Creates and returns a new swing radio button menu item
     * @param name the name of the menu item
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JRadioButtonMenuItem createJRadioButtonMenuItem(String name)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        return createJRadioButtonMenuItem(name, null);
    }

    /**
     * Creates and returns a new swing radio button menu item
     * @param name the name of the menu item
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JRadioButtonMenuItem createJRadioButtonMenuItem
        (String name, String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        JRadioButtonMenuItem result;
        result = new JRadioButtonMenuItem
            (getSpecializedString(name + TEXT_SUFFIX, specialization));
        initializeJMenuItem(result, name, specialization);

        // is the item selected?
        try {
            result.setSelected(getSpecializedBoolean(name + SELECTED_SUFFIX,
                                                     specialization));
        } catch (MissingResourceException e) {
            // '.selected' is optional; leave the default selection state.
        }

        return result;
    }

    /**
     * Creates and returns a new swing check box menu item
     * @param name the name of the menu item
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JCheckBoxMenuItem createJCheckBoxMenuItem(String name)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        return createJCheckBoxMenuItem(name, null);
    }

    /**
     * Creates and returns a new swing check box menu item
     * @param name the name of the menu item
     * @param specialization the name of the specialization to look for
     * @throws MissingResourceException if one of the keys that compose the
     *         menu item is missing.
     *         It is not thrown if the mnemonic, the accelerator and the
     *         action keys are missing
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    public JCheckBoxMenuItem createJCheckBoxMenuItem(String name,
                                                     String specialization)
        throws MissingResourceException,
               ResourceFormatException,
               MissingListenerException {
        JCheckBoxMenuItem result;
        result = new JCheckBoxMenuItem(getSpecializedString(name + TEXT_SUFFIX,
                                                            specialization));
        initializeJMenuItem(result, name, specialization);

        // is the item selected?
        try {
            result.setSelected(getSpecializedBoolean(name + SELECTED_SUFFIX,
                                                     specialization));
        } catch (MissingResourceException e) {
            // '.selected' is optional; leave the default selection state.
        }

        return result;
    }

    /**
     * Initializes a swing menu item
     * @param item the menu item to initialize
     * @param name the name of the menu item
     * @param specialization the name of the specialization to look for
     * @throws ResourceFormatException if the mnemonic is not a single
     *         character.
     * @throws MissingListenerException if then item action is not found in
     *         the action map.
     */
    protected void initializeJMenuItem(JMenuItem item, String name,
                                       String specialization)
        throws ResourceFormatException,
               MissingListenerException {
        // Action
        try {
            Action a = actions.getAction
                (getSpecializedString(name + ACTION_SUFFIX, specialization));
            if (a == null) {
                throw new MissingListenerException("", "Action",
                                                   name+ACTION_SUFFIX);
            }
            item.setAction(a);
            // setAction overwrites the label with the Action's name, so
            // restore the text configured in the resource bundle.
            item.setText(getSpecializedString(name + TEXT_SUFFIX,
                                              specialization));
            if (a instanceof JComponentModifier) {
                ((JComponentModifier)a).addJComponent(item);
            }
        } catch (MissingResourceException e) {
            // '.action' is optional; item stays action-less.
        }

        // Icon
        try {
            String s = getSpecializedString(name + ICON_SUFFIX,
                                            specialization);
            // Resolve the icon relative to the ActionMap implementation class.
            URL url = actions.getClass().getResource(s);
            if (url != null) {
                item.setIcon(new ImageIcon(url));
            }
        } catch (MissingResourceException e) {
            // '.icon' is optional.
        }

        // Mnemonic
        try {
            String str = getSpecializedString(name + MNEMONIC_SUFFIX,
                                              specialization);
            if (str.length() == 1) {
                item.setMnemonic(str.charAt(0));
            } else {
                throw new ResourceFormatException("Malformed mnemonic",
                                                  bundle.getClass().getName(),
                                                  name+MNEMONIC_SUFFIX);
            }
        } catch (MissingResourceException e) {
            // '.mnemonic' is optional.
        }

        // Accelerator
        try {
            if (!(item instanceof JMenu)) {
                String str = getSpecializedString(name + ACCELERATOR_SUFFIX,
                                                  specialization);
                KeyStroke ks = KeyStroke.getKeyStroke(str);
                if (ks != null) {
                    item.setAccelerator(ks);
                } else {
                    throw new ResourceFormatException
                        ("Malformed accelerator",
                         bundle.getClass().getName(),
                         name+ACCELERATOR_SUFFIX);
                }
            }
        } catch (MissingResourceException e) {
            // '.accelerator' is optional.
        }

        // is the item enabled?
        try {
            item.setEnabled(getSpecializedBoolean(name + ENABLED_SUFFIX,
                                                  specialization));
        } catch (MissingResourceException e) {
            // '.enabled' is optional; items are enabled by default.
        }
    }
}
package com.fly.firefly.ui.fragment.Register;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;

import com.fly.firefly.FireFlyApplication;
import com.fly.firefly.R;
import com.fly.firefly.base.BaseFragment;
import com.fly.firefly.ui.activity.FragmentContainerActivity;
import com.fly.firefly.ui.activity.Object.DatePickerObj;
import com.fly.firefly.ui.activity.Picker.CountryListDialogFragment;
import com.fly.firefly.ui.activity.Picker.DatePickerFragment;
import com.fly.firefly.ui.module.RegisterModule;
import com.fly.firefly.ui.presenter.RegisterPresenter;
import com.fly.firefly.utils.DropDownItem;

import java.util.ArrayList;

import javax.inject.Inject;

import butterknife.ButterKnife;
import butterknife.InjectView;

/**
 * Registration screen split into three paged sections (basic info, address,
 * contact) driven by {@link #showHiddenBlock(int)}. Country/state values are
 * chosen via {@link CountryListDialogFragment} and the birth date via
 * {@link DatePickerFragment}; both deliver results through
 * {@link #onActivityResult(int, int, Intent)}.
 *
 * Fix in this revision: dialog results were compared with
 * {@code selectedCountry.getTag() == "Country"} — reference identity on a
 * String that has been round-tripped through a Parcelable Intent extra, so
 * the comparison could never match and the Country branch was unreachable.
 * Replaced with {@code String.equals}.
 */
public class RegisterFragment extends BaseFragment implements RegisterPresenter.RegisterView {

    @Inject
    RegisterPresenter presenter;

    // Page indicator tap targets (one per registration section).
    @InjectView(R.id.registerIndicator1)
    LinearLayout indicator1;

    @InjectView(R.id.registerIndicator2)
    LinearLayout indicator2;

    @InjectView(R.id.registerIndicator3)
    LinearLayout indicator3;

    // The three form sections; exactly one is VISIBLE at a time.
    @InjectView(R.id.registerBasicInfoBlock)
    LinearLayout registerPersonalInfoBlock;

    @InjectView(R.id.registerAddressInfoBlock)
    LinearLayout registerAddressBlock;

    @InjectView(R.id.registerContactInfoBlock)
    LinearLayout registerContactBlock;

    @InjectView(R.id.registerContinueButton)
    Button registerContinueButton;

    @InjectView(R.id.imageViewRegisterIndicator)
    ImageView imageRegisterIndicator;

    @InjectView(R.id.editTextCountry)
    TextView editTextCountry;

    @InjectView(R.id.editTextState)
    TextView editTextState;

    @InjectView(R.id.txtRegisterDatePicker)
    TextView txtRegisterDatePicker;

    // Currently visible section (1..3).
    private int currentPage;
    // Placeholder drop-down data; presumably replaced by server data later
    // (TODO confirm — currently filled with synthetic values in onCreateView).
    private ArrayList<DropDownItem> countrys;
    private ArrayList<DropDownItem> state;
    // NOTE(review): day/month/year and fragmentContainerId are written but
    // never read in this class; kept for interface stability.
    private int day;
    private int month;
    private int year;
    private int fragmentContainerId;

    /** Factory method; args bundle is currently empty but reserved. */
    public static RegisterFragment newInstance() {
        RegisterFragment fragment = new RegisterFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;  // new SearchFragment();
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Dagger scoped-graph injection of the presenter.
        FireFlyApplication.get(getActivity()).createScopedGraph(new RegisterModule(this)).inject(this);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        final View view = inflater.inflate(R.layout.register, container, false);
        ButterKnife.inject(this, view);

        countrys = new ArrayList<DropDownItem>();
        state = new ArrayList<DropDownItem>();

        /*Add Country To DropDown List*/
        for (int x = 0; x < 15; x++) {
            DropDownItem itemCountry = new DropDownItem();
            itemCountry.setText(Integer.toString(x));
            itemCountry.setCode(Integer.toString(x));
            itemCountry.setTag("Country");
            countrys.add(itemCountry);
        }

        for (int x = 0; x < 20; x++) {
            DropDownItem itemCountry = new DropDownItem();
            itemCountry.setText(Integer.toString(x) + "State");
            itemCountry.setCode(Integer.toString(x));
            itemCountry.setTag("State");
            state.add(itemCountry);
        }

        /*Switch register info block*/
        editTextCountry.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showCountrySelector(getActivity(), countrys);
            }
        });

        editTextState.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showCountrySelector(getActivity(), state);
            }
        });

        txtRegisterDatePicker.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showTimePickerDialog(view);
            }
        });

        /*Initial indicator*/
        imageRegisterIndicator.setBackgroundResource(R.drawable.register_account_focus);
        currentPage = 1;

        /*Switch register info block*/
        indicator1.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showHiddenBlock(1);
            }
        });

        indicator2.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showHiddenBlock(2);
            }
        });

        indicator3.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                showHiddenBlock(3);
            }
        });

        registerContinueButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // "Continue" advances to the next section.
                showHiddenBlock(currentPage + 1);
            }
        });

        return view;
    }

    /** Opens the date-of-birth picker; result arrives in onActivityResult. */
    public void showTimePickerDialog(View v) {
        DialogFragment newFragment = new DatePickerFragment();
        newFragment.setTargetFragment(RegisterFragment.this, 0);
        newFragment.show(getFragmentManager(), "datePicker");
    }

    /**
     * Shows the country/state chooser dialog.
     *
     * @param act        hosting activity; ignored when null
     * @param constParam items to display (raw type kept for caller
     *                   compatibility; elements are DropDownItem)
     */
    public void showCountrySelector(Activity act, ArrayList constParam) {
        if (act != null) {
            try {
                android.support.v4.app.FragmentManager fm = getActivity().getSupportFragmentManager();
                CountryListDialogFragment countryListDialogFragment = CountryListDialogFragment.newInstance(constParam);
                countryListDialogFragment.setTargetFragment(RegisterFragment.this, 0);
                countryListDialogFragment.show(fm, "countryListDialogFragment");
            } catch (Exception e) {
                // Best-effort: a failed dialog show should not crash the form.
                e.printStackTrace();
            }
        }
    }

    /**
     * Receives results from the country/state dialog (requestCode 1) and the
     * date picker (any other code).
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        Log.e("enter here", "ok");
        if (resultCode != Activity.RESULT_OK) {
            return;
        } else {
            if (requestCode == 1) {
                // if (requestCode == 1) {
                Log.e("requestCode", "1");
                DropDownItem selectedCountry = data.getParcelableExtra(CountryListDialogFragment.EXTRA_COUNTRY);
                // BUGFIX: was `getTag() == "Country"` — reference comparison on a
                // String rebuilt from a Parcelable extra never matches; use equals.
                if ("Country".equals(selectedCountry.getTag())) {
                    editTextCountry.setText(selectedCountry.getText());
                } else {
                    editTextState.setText(selectedCountry.getText());
                }
                // } else if (requestCode == 0) {
                //     Log.e("requestCode","0");
                //controllerMessages.setVisibility(LinearLayout.GONE);
                // }
            } else {
                DatePickerObj date = (DatePickerObj) data.getSerializableExtra(DatePickerFragment.EXTRA_DATE);
                String month = getMonthAlphabet(date.getMonth());
                Log.e("Date Picker", Integer.toString(date.getMonth()));
                txtRegisterDatePicker.setText(date.getDay() + " " + month + " " + date.getYear());
            }
        }
    }

    /*Change Field Block*/
    /**
     * Switches the visible form section. Pages 1-3 swap the section blocks
     * and indicator image; page 4 is the validation trigger (not yet
     * implemented); anything else closes the activity (used by back
     * navigation from page 1).
     */
    public void showHiddenBlock(int page) {
        if (page == 1) {
            imageRegisterIndicator.setBackgroundResource(R.drawable.register_account_focus);
            resetRegisterFieldBlockVisibility();
            registerPersonalInfoBlock.setVisibility(View.VISIBLE);
            currentPage = 1;
        } else if (page == 2) {
            imageRegisterIndicator.setBackgroundResource(R.drawable.register_address_focus);
            resetRegisterFieldBlockVisibility();
            registerAddressBlock.setVisibility(View.VISIBLE);
            currentPage = 2;
        } else if (page == 3) {
            imageRegisterIndicator.setBackgroundResource(R.drawable.register_contact_focus);
            resetRegisterFieldBlockVisibility();
            registerContactBlock.setVisibility(View.VISIBLE);
            currentPage = 3;
        } else if (page == 4) {
            Log.e("READY FOR VALIDATION", "TRUE");
        } else {
            // Back past the first page: leave the registration screen.
            getActivity().finish();
        }
    }

    /*Set all block visibility to - GONE*/
    public void resetRegisterFieldBlockVisibility() {
        registerPersonalInfoBlock.setVisibility(View.GONE);
        registerAddressBlock.setVisibility(View.GONE);
        registerContactBlock.setVisibility(View.GONE);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        fragmentContainerId = ((FragmentContainerActivity) getActivity()).getFragmentContainerId();
    }

    @Override
    public void onResume() {
        super.onResume();
        presenter.onResume();
    }

    @Override
    public void onPause() {
        super.onPause();
        presenter.onPause();
    }

    /** Hardware/UI back handler: steps to the previous section. */
    public void registerBackFunction() {
        showHiddenBlock(currentPage - 1);
    }
}
/* * Copyright 2022 The Hekate Project * * The Hekate Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.hekate.network.address; import io.hekate.HekateTestBase; import org.junit.Test; import static io.hekate.network.address.AddressPatternOpts.parse; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertSame; public class AddressPatternOptsTest extends HekateTestBase { @Test public void testAny() { AddressPatternOpts opts = parse("any"); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); } @Test public void testAnyIp4() { AddressPatternOpts opts = parse("any-ip4"); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); opts = parse(" any-ip4 "); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); } @Test public void testAnyIp6() { AddressPatternOpts opts = parse("any-ip6"); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); opts = parse(" any-ip6 "); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); } @Test public void testIpMatchPrefix() { AddressPatternOpts opts = parse("ip~.*"); assertEquals(".*", opts.ipMatch()); 
assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); opts = parse(" ip~ .*"); assertEquals(".*", opts.ipMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); } @Test public void testIpNotMatchPrefix() { AddressPatternOpts opts = parse("!ip~.*"); assertEquals(".*", opts.ipNotMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); opts = parse(" !ip~ .* "); assertEquals(".*", opts.ipNotMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); } @Test public void testIp4MatchPrefix() { AddressPatternOpts opts = parse("ip4~.*"); assertEquals(".*", opts.ipMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); opts = parse(" ip4~ .* "); assertEquals(".*", opts.ipMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); } @Test public void testIp4NotMatchPrefix() { AddressPatternOpts opts = parse("!ip4~.*"); assertEquals(".*", opts.ipNotMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); opts = parse(" !ip4~ .* "); assertEquals(".*", opts.ipNotMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); } @Test public void testIp6MatchPrefix() { AddressPatternOpts opts = parse("ip6~.*"); assertEquals(".*", opts.ipMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); 
assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); opts = parse(" ip6~ .* "); assertEquals(".*", opts.ipMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipNotMatch()); assertInterfacePattersAreNull(opts); } @Test public void testIp6NotMatchPrefix() { AddressPatternOpts opts = parse("!ip6~.*"); assertEquals(".*", opts.ipNotMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); opts = parse(" !ip6~ .* "); assertEquals(".*", opts.ipNotMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.ipMatch()); assertInterfacePattersAreNull(opts); } @Test public void testInterfaceMatchPrefix() { AddressPatternOpts opts = parse("net~.*"); assertEquals(".*", opts.interfaceMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); opts = parse(" net~ .* "); assertEquals(".*", opts.interfaceMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); } @Test public void testInterface4MatchPrefix() { AddressPatternOpts opts = parse("net4~.*"); assertEquals(".*", opts.interfaceMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); opts = parse(" net4~ .* "); assertEquals(".*", opts.interfaceMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); } @Test public void testInterface6MatchPrefix() { AddressPatternOpts opts = parse("net6~.*"); assertEquals(".*", opts.interfaceMatch()); 
assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); opts = parse(" net6~ .* "); assertEquals(".*", opts.interfaceMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceNotMatch()); assertIpPatternsAreNull(opts); } @Test public void testInterfaceNotMatchPrefix() { AddressPatternOpts opts = parse("!net~.*"); assertEquals(".*", opts.interfaceNotMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); opts = parse(" !net~ .* "); assertEquals(".*", opts.interfaceNotMatch()); assertNull(opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); } @Test public void testInterface4NotMatchPrefix() { AddressPatternOpts opts = parse("!net4~.*"); assertEquals(".*", opts.interfaceNotMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); opts = parse(" !net4~ .* "); assertEquals(".*", opts.interfaceNotMatch()); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); } @Test public void testInterface6NotMatchPrefix() { AddressPatternOpts opts = parse("!net6~.*"); assertEquals(".*", opts.interfaceNotMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); opts = parse(" !net6~ .* "); assertEquals(".*", opts.interfaceNotMatch()); assertSame(AddressPatternOpts.IpVersion.V6, opts.ipVersion()); assertNull(opts.exactAddress()); assertNull(opts.interfaceMatch()); assertIpPatternsAreNull(opts); } @Test public void testExactAddress() 
{ AddressPatternOpts opts = parse("127.0.0.1"); assertEquals("127.0.0.1", opts.exactAddress()); assertNull(opts.ipVersion()); assertInterfacePattersAreNull(opts); assertIpPatternsAreNull(opts); opts = parse(" 127.0.0.1 "); assertEquals("127.0.0.1", opts.exactAddress()); assertNull(opts.ipVersion()); assertInterfacePattersAreNull(opts); assertIpPatternsAreNull(opts); } @Test public void testEmpty() { AddressPatternOpts opts = parse(""); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); } @Test public void testNull() { AddressPatternOpts opts = parse(null); assertSame(AddressPatternOpts.IpVersion.V4, opts.ipVersion()); assertNull(opts.exactAddress()); assertAllPatternsAreNull(opts); } @Test public void testToString() { assertEquals("any-ip4", parse(null).toString()); assertEquals("any", parse("any").toString()); assertEquals("ip~.*", parse("ip~.*").toString()); } private void assertAllPatternsAreNull(AddressPatternOpts opts) { assertInterfacePattersAreNull(opts); assertIpPatternsAreNull(opts); } private void assertInterfacePattersAreNull(AddressPatternOpts opts) { assertNull(opts.interfaceMatch()); assertNull(opts.interfaceNotMatch()); } private void assertIpPatternsAreNull(AddressPatternOpts opts) { assertNull(opts.ipMatch()); assertNull(opts.ipNotMatch()); } }
// Copyright 2015 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.runtime; import static com.google.common.truth.Truth.assertThat; import static org.mockito.AdditionalMatchers.find; import static org.mockito.AdditionalMatchers.not; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.contains; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; import com.google.devtools.build.lib.analysis.ConfiguredTarget; import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue; import com.google.devtools.build.lib.analysis.test.TestProvider; import com.google.devtools.build.lib.analysis.test.TestProvider.TestParams; import com.google.devtools.build.lib.buildeventstream.BuildEventContext; import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos; import com.google.devtools.build.lib.buildeventstream.BuildEventStreamProtos.TestStatus; import com.google.devtools.build.lib.buildeventstream.PathConverter; import com.google.devtools.build.lib.clock.BlazeClock; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.util.io.AnsiTerminalPrinter; import 
com.google.devtools.build.lib.vfs.DigestHashFunction; import com.google.devtools.build.lib.vfs.FileSystem; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.inmemoryfs.InMemoryFileSystem; import com.google.devtools.build.lib.view.test.TestStatus.BlazeTestStatus; import com.google.devtools.build.lib.view.test.TestStatus.FailedTestCasesStatus; import com.google.devtools.build.lib.view.test.TestStatus.TestCase; import com.google.devtools.build.lib.view.test.TestStatus.TestCase.Status; import com.google.protobuf.util.Durations; import com.google.protobuf.util.Timestamps; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.InOrder; import org.mockito.Mockito; @RunWith(JUnit4.class) public class TestSummaryTest { private static final String ANY_STRING = ".*?"; private static final String PATH = "package"; private static final String TARGET_NAME = "name"; private ConfiguredTarget stubTarget; private static final ImmutableList<Long> SMALL_TIMING = ImmutableList.of(1L, 2L, 3L, 4L); private static final int CACHED = SMALL_TIMING.size(); private static final int NOT_CACHED = 0; private FileSystem fs; private TestSummary.Builder basicBuilder; @Before public final void createFileSystem() throws Exception { fs = new InMemoryFileSystem(BlazeClock.instance(), DigestHashFunction.SHA256); stubTarget = stubTarget(); basicBuilder = getTemplateBuilder(); } private TestSummary.Builder getTemplateBuilder() { BuildConfigurationValue configuration = Mockito.mock(BuildConfigurationValue.class); when(configuration.checksum()).thenReturn("abcdef"); return TestSummary.newBuilder(stubTarget) .setConfiguration(configuration) .setStatus(BlazeTestStatus.PASSED) .setNumCached(NOT_CACHED) .setActionRan(true) .setRanRemotely(false) 
.setWasUnreportedWrongSize(false); } private List<Path> getPathList(String... names) { List<Path> list = new ArrayList<>(); for (String name : names) { list.add(fs.getPath(name)); } return list; } @Test public void testShouldProperlyTestLabels() throws Exception { ConfiguredTarget target = target("somepath", "MyTarget"); String expectedString = ANY_STRING + "//somepath:MyTarget" + ANY_STRING; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summaryStatus = createTestSummary(target, BlazeTestStatus.PASSED, CACHED); TestSummaryPrinter.print(summaryStatus, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testShouldPrintPassedStatus() throws Exception { String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + ANY_STRING; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); verify(terminalPrinter).print(find(expectedString)); } @Test public void testShouldPrintFailedStatus() throws Exception { String expectedString = ANY_STRING + "ERROR" + ANY_STRING + BlazeTestStatus.FAILED + ANY_STRING; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.FAILED, NOT_CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } private void assertShouldNotPrint(BlazeTestStatus status, boolean verboseSummary) { AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print( createTestSummary(stubTarget, status, NOT_CACHED), terminalPrinter, Path::getPathString, verboseSummary, false); verify(terminalPrinter, never()).print(anyString()); } 
@Test public void testShouldPrintFailedToBuildStatus() { String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.FAILED_TO_BUILD; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(BlazeTestStatus.FAILED_TO_BUILD, NOT_CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testShouldNotPrintFailedToBuildStatus() { assertShouldNotPrint(BlazeTestStatus.FAILED_TO_BUILD, false); } @Test public void testShouldNotPrintHaltedStatus() { assertShouldNotPrint(BlazeTestStatus.BLAZE_HALTED_BEFORE_TESTING, true); } @Test public void testShouldPrintCachedStatus() throws Exception { String expectedString = ANY_STRING + "\\(cached" + ANY_STRING; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testPartialCachedStatus() throws Exception { String expectedString = ANY_STRING + "\\(3/4 cached" + ANY_STRING; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, CACHED - 1); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testIncompleteCached() throws Exception { AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.INCOMPLETE, CACHED - 1); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); verify(terminalPrinter).print(not(contains("cached"))); } @Test public void testShouldPrintUncachedStatus() throws Exception 
{ AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); verify(terminalPrinter).print(not(contains("cached"))); } @Test public void testNoTiming() throws Exception { String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = createTestSummary(stubTarget, BlazeTestStatus.PASSED, NOT_CACHED); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testBuilder() throws Exception { // No need to copy if built twice in a row; no direct setters on the object. TestSummary summary = basicBuilder.build(); TestSummary sameSummary = basicBuilder.build(); assertThat(sameSummary).isSameInstanceAs(summary); basicBuilder.addTestTimes(ImmutableList.of(40L)); TestSummary summaryCopy = basicBuilder.build(); assertThat(summaryCopy.getTarget()).isEqualTo(summary.getTarget()); assertThat(summaryCopy.getStatus()).isEqualTo(summary.getStatus()); assertThat(summaryCopy.numCached()).isEqualTo(summary.numCached()); assertThat(summaryCopy).isNotSameInstanceAs(summary); assertThat(summary.totalRuns()).isEqualTo(0); assertThat(summaryCopy.totalRuns()).isEqualTo(1); // Check that the builder can add a new warning to the copy, // despite the immutability of the original. 
basicBuilder.addTestTimes(ImmutableList.of(60L)); TestSummary fiftyCached = basicBuilder.setNumCached(50).build(); assertThat(fiftyCached.getStatus()).isEqualTo(summary.getStatus()); assertThat(fiftyCached.numCached()).isEqualTo(50); assertThat(fiftyCached.totalRuns()).isEqualTo(2); TestSummary sixtyCached = basicBuilder.setNumCached(60).build(); assertThat(sixtyCached.numCached()).isEqualTo(60); assertThat(fiftyCached.numCached()).isEqualTo(50); } @Test public void testAsStreamProto() throws Exception { TestParams testParams = mock(TestParams.class); when(testParams.getRuns()).thenReturn(2); when(testParams.getShards()).thenReturn(3); TestProvider testProvider = new TestProvider(testParams); when(stubTarget.getProvider(eq(TestProvider.class))).thenReturn(testProvider); PathConverter pathConverter = mock(PathConverter.class); when(pathConverter.apply(any(Path.class))) .thenAnswer( invocation -> "/path/to" + ((Path) invocation.getArguments()[0]).getPathString()); BuildEventContext converters = mock(BuildEventContext.class); when(converters.pathConverter()).thenReturn(pathConverter); TestSummary summary = basicBuilder .setStatus(BlazeTestStatus.FAILED) .addPassedLogs(getPathList("/apple")) .addFailedLogs(getPathList("/pear")) .mergeTiming(1000, 300) .build(); assertThat(summary.asStreamProto(converters).getTestSummary()) .isEqualTo( BuildEventStreamProtos.TestSummary.newBuilder() .setOverallStatus(TestStatus.FAILED) .setFirstStartTimeMillis(1000) .setFirstStartTime(Timestamps.fromMillis(1000)) .setLastStopTimeMillis(1300) .setLastStopTime(Timestamps.fromMillis(1300)) .setTotalRunDurationMillis(300) .setTotalRunDuration(Durations.fromMillis(300)) .setRunCount(2) .setShardCount(3) .addPassed(BuildEventStreamProtos.File.newBuilder().setUri("/path/to/apple")) .addFailed(BuildEventStreamProtos.File.newBuilder().setUri("/path/to/pear")) .build()); } @Test public void testSingleTime() throws Exception { String expectedString = ANY_STRING + "INFO" + ANY_STRING + 
BlazeTestStatus.PASSED + ANY_STRING + "in 3.4s"; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = basicBuilder.addTestTimes(ImmutableList.of(3412L)).build(); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testNoTime() throws Exception { // The last part matches anything not containing "in". String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + "(?!in)*"; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = basicBuilder.addTestTimes(ImmutableList.of(3412L)).build(); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, false, false); terminalPrinter.print(find(expectedString)); } @Test public void testMultipleTimes() throws Exception { String expectedString = ANY_STRING + "INFO" + ANY_STRING + BlazeTestStatus.PASSED + ANY_STRING + "\n Stats over 3 runs: max = 3.0s, min = 1.0s, " + "avg = 2.0s, dev = 0.8s"; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = basicBuilder .addTestTimes(ImmutableList.of(1000L, 2000L, 3000L)) .build(); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testCoverageDataReferences() throws Exception { List<Path> paths = getPathList("/cov1.dat", "/cov2.dat", "/cov3.dat", "/cov4.dat"); FileSystemUtils.writeContentAsLatin1(paths.get(1), "something"); FileSystemUtils.writeContentAsLatin1(paths.get(3), ""); FileSystemUtils.writeContentAsLatin1(paths.get(3), "something else"); TestSummary summary = basicBuilder.addCoverageFiles(paths).build(); AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); verify(terminalPrinter).print(find(ANY_STRING 
+ "INFO" + ANY_STRING + BlazeTestStatus.PASSED)); verify(terminalPrinter).print(find(" /cov2.dat")); verify(terminalPrinter).print(find(" /cov4.dat")); } @Test public void testFlakyAttempts() throws Exception { String expectedString = ANY_STRING + "WARNING" + ANY_STRING + BlazeTestStatus.FLAKY + ANY_STRING + ", failed in 2 out of 3"; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = basicBuilder .setStatus(BlazeTestStatus.FLAKY) .addPassedLogs(getPathList("/a")) .addFailedLogs(getPathList("/b", "/c")) .build(); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testNumberOfFailedRuns() throws Exception { String expectedString = ANY_STRING + "ERROR" + ANY_STRING + BlazeTestStatus.FAILED + ANY_STRING + "in 2 out of 3"; AnsiTerminalPrinter terminalPrinter = Mockito.mock(AnsiTerminalPrinter.class); TestSummary summary = basicBuilder .setStatus(BlazeTestStatus.FAILED) .addPassedLogs(getPathList("/a")) .addFailedLogs(getPathList("/b", "/c")) .build(); TestSummaryPrinter.print(summary, terminalPrinter, Path::getPathString, true, false); terminalPrinter.print(find(expectedString)); } @Test public void testFileNamesNotShown() throws Exception { List<TestCase> emptyDetails = ImmutableList.of(); TestSummary summary = basicBuilder .setStatus(BlazeTestStatus.FAILED) .addPassedLogs(getPathList("/apple")) .addFailedLogs(getPathList("/pear")) .addCoverageFiles(getPathList("/maracuja")) .addFailedTestCases(emptyDetails, FailedTestCasesStatus.FULL) .build(); // Check that only //package:name is printed. 
AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, printer, Path::getPathString, true, true); verify(printer).print(contains("//package:name")); } @Test public void testMessageShownWhenTestCasesMissing() throws Exception { ImmutableList<TestCase> emptyList = ImmutableList.of(); TestSummary summary = createTestSummaryWithDetails( BlazeTestStatus.FAILED, emptyList, FailedTestCasesStatus.NOT_AVAILABLE); AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, printer, Path::getPathString, true, true); verify(printer).print(contains("//package:name")); verify(printer).print(contains("not available")); } @Test public void testMessageShownForPartialResults() throws Exception { ImmutableList<TestCase> testCases = ImmutableList.of(newDetail("orange", TestCase.Status.FAILED, 1500L)); TestSummary summary = createTestSummaryWithDetails(BlazeTestStatus.FAILED, testCases, FailedTestCasesStatus.PARTIAL); AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, printer, Path::getPathString, true, true); verify(printer).print(contains("//package:name")); verify(printer).print(find("FAILED.*orange")); verify(printer).print(contains("incomplete")); } private TestCase newDetail(String name, TestCase.Status status, long duration) { return TestCase.newBuilder() .setName(name) .setStatus(status) .setRunDurationMillis(duration) .build(); } @Test public void testTestCaseNamesShownWhenNeeded() throws Exception { TestCase detailPassed = newDetail("strawberry", TestCase.Status.PASSED, 1000L); TestCase detailFailed = newDetail("orange", TestCase.Status.FAILED, 1500L); TestSummary summaryPassed = createTestSummaryWithDetails( BlazeTestStatus.PASSED, Arrays.asList(detailPassed)); TestSummary summaryFailed = createTestSummaryWithDetails( BlazeTestStatus.FAILED, Arrays.asList(detailPassed, detailFailed)); 
assertThat(summaryFailed.getStatus()).isEqualTo(BlazeTestStatus.FAILED); AnsiTerminalPrinter printerPassed = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summaryPassed, printerPassed, Path::getPathString, true, true); verify(printerPassed).print(contains("//package:name")); AnsiTerminalPrinter printerFailed = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summaryFailed, printerFailed, Path::getPathString, true, true); verify(printerFailed).print(contains("//package:name")); verify(printerFailed).print(find("FAILED.*orange *\\(1\\.5")); } @Test public void testTestCaseNamesOrdered() throws Exception { TestCase[] details = { newDetail("apple", TestCase.Status.FAILED, 1000L), newDetail("banana", TestCase.Status.FAILED, 1000L), newDetail("cranberry", TestCase.Status.FAILED, 1000L) }; // The exceedingly dumb approach: writing all the permutations down manually // is simply easier than any way of generating them. int[][] permutations = { { 0, 1, 2 }, { 0, 2, 1 }, { 1, 0, 2 }, { 1, 2, 0 }, { 2, 0, 1 }, { 2, 1, 0 } }; for (int[] permutation : permutations) { List<TestCase> permutatedDetails = new ArrayList<>(); for (int element : permutation) { permutatedDetails.add(details[element]); } TestSummary summary = createTestSummaryWithDetails(BlazeTestStatus.FAILED, permutatedDetails); // A mock that checks the ordering of method calls AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, printer, Path::getPathString, true, true); InOrder order = Mockito.inOrder(printer); order.verify(printer).print(contains("//package:name")); order.verify(printer).print(find("FAILED.*apple")); order.verify(printer).print(find("FAILED.*banana")); order.verify(printer).print(find("FAILED.*cranberry")); } } @Test public void testCachedResultsFirstInSort() throws Exception { TestSummary summaryFailedCached = createTestSummary(BlazeTestStatus.FAILED, CACHED); TestSummary summaryFailedNotCached = 
createTestSummary(BlazeTestStatus.FAILED, NOT_CACHED); TestSummary summaryPassedCached = createTestSummary(BlazeTestStatus.PASSED, CACHED); TestSummary summaryPassedNotCached = createTestSummary(BlazeTestStatus.PASSED, NOT_CACHED); // This way we can make the test independent from the sort order of FAILEd // and PASSED. assertThat(summaryFailedCached.compareTo(summaryPassedNotCached)).isLessThan(0); assertThat(summaryPassedCached.compareTo(summaryFailedNotCached)).isLessThan(0); } @Test public void testCollectingFailedDetails() throws Exception { TestCase rootCase = TestCase.newBuilder() .setName("tests") .setRunDurationMillis(5000L) .addChild(newDetail("apple", TestCase.Status.FAILED, 1000L)) .addChild(newDetail("banana", TestCase.Status.PASSED, 1000L)) .addChild(newDetail("cherry", TestCase.Status.ERROR, 1000L)) .build(); TestSummary summary = getTemplateBuilder().collectTestCases(rootCase).setStatus(BlazeTestStatus.FAILED).build(); AnsiTerminalPrinter printer = Mockito.mock(AnsiTerminalPrinter.class); TestSummaryPrinter.print(summary, printer, Path::getPathString, true, true); verify(printer).print(contains("//package:name")); verify(printer).print(find("FAILED.*apple")); verify(printer).print(find("ERROR.*cherry")); } @Test public void countTotalTestCases() throws Exception { TestCase rootCase = TestCase.newBuilder() .setName("tests") .setRunDurationMillis(5000L) .addChild(newDetail("apple", TestCase.Status.FAILED, 1000L)) .addChild(newDetail("banana", TestCase.Status.PASSED, 1000L)) .addChild(newDetail("cherry", TestCase.Status.ERROR, 1000L)) .build(); TestSummary summary = getTemplateBuilder().collectTestCases(rootCase).setStatus(BlazeTestStatus.FAILED).build(); assertThat(summary.getTotalTestCases()).isEqualTo(3); } @Test public void countUnknownTestCases() throws Exception { TestSummary summary = getTemplateBuilder().collectTestCases(null).setStatus(BlazeTestStatus.FAILED).build(); assertThat(summary.getTotalTestCases()).isEqualTo(1); 
assertThat(summary.getUnkownTestCases()).isEqualTo(1); } @Test public void countNotRunTestCases() throws Exception { TestCase a = TestCase.newBuilder() .addChild( TestCase.newBuilder().setName("A").setStatus(Status.PASSED).setRun(true).build()) .addChild( TestCase.newBuilder().setName("B").setStatus(Status.PASSED).setRun(true).build()) .addChild( TestCase.newBuilder().setName("C").setStatus(Status.PASSED).setRun(false).build()) .build(); TestSummary summary = getTemplateBuilder().collectTestCases(a).setStatus(BlazeTestStatus.FAILED).build(); assertThat(summary.getTotalTestCases()).isEqualTo(2); assertThat(summary.getUnkownTestCases()).isEqualTo(0); assertThat(summary.getFailedTestCases()).isEmpty(); } @Test public void countTotalTestCasesInNestedTree() throws Exception { TestCase aCase = TestCase.newBuilder() .setName("tests-1") .setRunDurationMillis(5000L) .addChild(newDetail("apple", TestCase.Status.FAILED, 1000L)) .addChild(newDetail("banana", TestCase.Status.PASSED, 1000L)) .addChild(newDetail("cherry", TestCase.Status.ERROR, 1000L)) .build(); TestCase anotherCase = TestCase.newBuilder() .setName("tests-2") .setRunDurationMillis(5000L) .addChild(newDetail("apple", TestCase.Status.FAILED, 1000L)) .addChild(newDetail("banana", TestCase.Status.PASSED, 1000L)) .addChild(newDetail("cherry", TestCase.Status.ERROR, 1000L)) .build(); TestCase rootCase = TestCase.newBuilder().setName("tests").addChild(aCase).addChild(anotherCase).build(); TestSummary summary = getTemplateBuilder().collectTestCases(rootCase).setStatus(BlazeTestStatus.FAILED).build(); assertThat(summary.getTotalTestCases()).isEqualTo(6); } private ConfiguredTarget target(String path, String targetName) throws Exception { ConfiguredTarget target = Mockito.mock(ConfiguredTarget.class); when(target.getLabel()).thenReturn(Label.create(path, targetName)); when(target.getConfigurationChecksum()).thenReturn("abcdef"); TestParams mockParams = Mockito.mock(TestParams.class); 
when(mockParams.getShards()).thenReturn(1); when(target.getProvider(TestProvider.class)).thenReturn(new TestProvider(mockParams)); return target; } private ConfiguredTarget stubTarget() throws Exception { return target(PATH, TARGET_NAME); } private TestSummary createTestSummaryWithDetails(BlazeTestStatus status, List<TestCase> details) { TestSummary summary = getTemplateBuilder() .setStatus(status) .addFailedTestCases(details, FailedTestCasesStatus.FULL) .build(); return summary; } private TestSummary createTestSummaryWithDetails( BlazeTestStatus status, List<TestCase> testCaseList, FailedTestCasesStatus detailsStatus) { TestSummary summary = getTemplateBuilder() .setStatus(status) .addFailedTestCases(testCaseList, detailsStatus) .build(); return summary; } private static TestSummary createTestSummary(ConfiguredTarget target, BlazeTestStatus status, int numCached) { ImmutableList<TestCase> emptyList = ImmutableList.of(); return TestSummary.newBuilder(target) .setStatus(status) .setNumCached(numCached) .setActionRan(true) .setRanRemotely(false) .setWasUnreportedWrongSize(false) .addFailedTestCases(emptyList, FailedTestCasesStatus.FULL) .addTestTimes(SMALL_TIMING) .build(); } private TestSummary createTestSummary(BlazeTestStatus status, int numCached) { TestSummary summary = getTemplateBuilder() .setStatus(status) .setNumCached(numCached) .addTestTimes(SMALL_TIMING) .build(); return summary; } }
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *******************************************************************************/
package org.apache.wink.common.internal.i18n;

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import junit.framework.TestCase;

/**
 * Checks the translation properties file against the java source code for:
 *
 * 1) checks that the strings that should be externalized are externalized
 *    (only debug/trace messages do not need to be externalized)
 * 2) checks that all keys referred to by Messages.getMessage actually exist
 * 3) checks that there are no unused keys in the resource.properties file
 * 4) checks that the number of params matches up with the number of braces {}
 *    in a formatted log string
 */
public class MessagesTest extends TestCase {

    // lazily resolved filesystem path to the workspace root (see getWorkspacePath())
    private String workSpacePath = null;
    // cache of formatted strings -> brace counts, built in setUp()
    private MessageStringsCache messageStrings = null;
    // starts as a full copy of resource.properties; keys are removed as they are
    // seen in source, so anything left at the end is an unused key
    private Properties unusedProps;

    // some necessary pre-compiled patterns:
    // matches a Messages.getMessage(...) call up to the terminating semicolon
    static final Pattern patternForNoLogger = Pattern.compile("\\G.*?((Messages\\s*?\\.\\s*?getMessage.*?));", Pattern.COMMENTS);
    // matches indexed placeholders of the form {n} (MessageFormat style)
    static final Pattern patternIntRequired = Pattern.compile("\\G.*?\\{(\\d+?)}", Pattern.COMMENTS);
    // matches bare placeholders of the form {} (SLF4J style)
    static final Pattern patternIntNotRequired = Pattern.compile("\\G.*?\\{}", Pattern.COMMENTS);

    // default resource file in case of unittest environment
    private static String defaultResourceFile = "wink-common/src/main/resources/org/apache/wink/common/internal/i18n/resource.properties";

    static {
        // normalize the hard-coded forward slashes for the local platform
        defaultResourceFile = defaultResourceFile.replace("/", System.getProperty("file.separator"));
    }

    /**
     * A cache to hold the formatted strings with their brace counts, so counts
     * do not have to be taken again and again for a formatted string that is
     * used many times.
     */
    private static class MessageStringsCache {

        // the cache: formatted string -> number of placeholders it contains
        private HashMap<String, Integer> stringsToBraceCount = new HashMap<String, Integer>();

        // defensive copy of the original message properties
        private Properties messageProps = null;

        /**
         * Keeps a copy of the original message properties as a convenience to
         * users of this class.
         *
         * @param props original message properties
         */
        public MessageStringsCache(Properties props) {
            messageProps = new Properties();
            messageProps.putAll(props);
        }

        /**
         * Looks up the formatted string for a key, failing the test when the
         * key is missing or maps to an empty value.
         *
         * @param key into the messages properties
         * @param filePath param is passed to produce meaningful failure message only
         * @return the non-null, non-empty formatted string for the key
         */
        public String getFormattedStringByKey(String key, String filePath) {
            String formattedString = messageProps.getProperty(key);
            if (formattedString == null) {
                fail("Expected to find non-null property with key \n" + key + "\n used by\n" + filePath);
            } else if (formattedString.equals("")) {
                fail("Expected to find non-empty property with key \n" + key + "\n used by\n" + filePath);
            }
            return formattedString;
        }

        /**
         * @param key into the messages properties
         * @param intRequired if braces are formatted with an integer n, like {n}, intRequired should be set to true
         * @param filePath param is passed to produce meaningful failure message only
         * @return count of all {} when intRequired = false or unique {n} occurrences when intRequired = true,
         *         or -1 when the key could not be resolved (fail() has already reported it)
         */
        public int getBraceCountByKey(String key, boolean intRequired, String filePath) {
            String formattedString = getFormattedStringByKey(key, filePath);
            if (formattedString != null) {
                return getBraceCount(formattedString, intRequired);
            }
            return -1;
        }

        /**
         * @param string the actual formatted message string
         * @param intRequired if braces are formatted with an integer n, like {n}, intRequired should be set to true
         * @return count of all {} when intRequired = false or unique {n} occurrences when intRequired = true
         */
        public int getBraceCount(String string, boolean intRequired) {
            if (!stringsToBraceCount.containsKey(string)) {
                // count the number of occurrences of {} or {n} where n is an int
                Pattern pattern;
                if (intRequired) {
                    pattern = patternIntRequired;
                } else {
                    pattern = patternIntNotRequired;
                }
                Matcher matcher = pattern.matcher(string);
                int counter = 0;
                if (intRequired) {
                    // string may contain multiple {0} constructs.  We want to count the unique integers
                    HashSet<String> ints = new HashSet<String>();
                    while(matcher.find()) {
                        ints.add(matcher.group(1));
                    }
                    counter = ints.size();
                } else {
                    // bare {} placeholders: every occurrence counts
                    while(matcher.find()) {
                        counter++;
                    }
                }
                stringsToBraceCount.put(string, counter);
            }
            return stringsToBraceCount.get(string);
        }
    }

    /**
     * Loads resource.properties from the workspace and primes the cache.
     * Any failure here (bad path, unreadable file) fails the test up front.
     */
    @Override
    public void setUp() {
        try {
            unusedProps = new Properties();
            System.out.println("Loading properties from: " + getWorkspacePath() + defaultResourceFile);
            unusedProps.load(new FileInputStream(getWorkspacePath() + defaultResourceFile));
            messageStrings = new MessageStringsCache(unusedProps);
        } catch (Throwable t) {
            fail("Could not load properties due to: " + t + ": " + t.getMessage());
        }
    }

    /**
     * Filter to determine which files to scan. Scanner will only accept *.java
     * files from production-code directories; svn metadata, test source and
     * non-production modules are excluded.
     */
    private static class JavaSrcFilenameFilter implements FilenameFilter {

        /**
         * @param dir path up to, but not including, the filename
         * @param name of the file
         * @return true if dir and name satisfy all of the filter rules
         */
        public boolean accept(File dir, String name) {
            // try to filter down to just production code source
            String dirString = dir.toString();
            if (!dirString.contains(".svn")
                && !dirString.contains("src" + System.getProperty("file.separator") + "test")
                && !dirString.contains("wink-examples")
                && !dirString.contains("wink-itests")
                && !dirString.contains("wink-component-test-support")
                && !dirString.contains("wink-assembly")
                && name.endsWith(".java")) {
                return true;
            }
            return false;
        }
    }

    /**
     * Recursively collect the list of filtered files under a directory.
     *
     * @param directory root of the recursive walk
     * @param filter may be null, in which case every entry is accepted
     * @return all accepted files (directories themselves may be included if the filter accepts them)
     */
    private static Collection<File> listFiles(File directory, FilenameFilter filter) {
        Vector<File> files = new Vector<File>();
        File[] entries = directory.listFiles();
        for (File entry : entries) {
            if (filter == null || filter.accept(directory, entry.getName())) {
                files.add(entry);
            }
            // recurse into subdirectories regardless of whether the filter accepted them
            if (entry.isDirectory()) {
                files.addAll(listFiles(entry, filter));
            }
        }
        return files;
    }

    /**
     * Used in junit only.
     *
     * Derives the workspace root by locating a "test-classes" entry on the
     * classpath and stripping the trailing module/target portion.
     *
     * @return full filesystem path to the workspace root
     */
    private String getWorkspacePath() {
        if (workSpacePath == null) {
            // set up the default properties file in the location where the RestServlet will find it upon test execution
            String classPath = System.getProperty("java.class.path");
            StringTokenizer tokenizer = new StringTokenizer(classPath, System.getProperty("path.separator"));
            while (tokenizer.hasMoreElements()) {
                String temp = tokenizer.nextToken();
                if (temp.endsWith("test-classes")) {
                    if (!temp.startsWith(System.getProperty("file.separator"))) {
                        // must be on Windows.  get rid of "c:"
                        temp = temp.substring(2, temp.length());
                    }
                    workSpacePath = temp;
                    break;
                }
            }
            if (workSpacePath == null) {
                fail("Failed to find test-classes directory to assist in finding workspace root");
            }
            // move up to peer path of wink-common (so, minus wink-common/target/test-classes)
            // NOTE: the 31 is the fixed length of that suffix — TODO confirm it matches the module layout
            workSpacePath = workSpacePath.substring(0, workSpacePath.length() - 31);
        }
        return workSpacePath;
    }

    /**
     * Extracts the quoted string, and splits the string at the first comma not
     * in quotes.  String parameter will be something like either of the
     * following:
     *
     * Messages.getMessage("someKeyToMessageProps", object1, object2)
     * Messages.getMessage(SOME_STATIC_VAR, object1)
     * Messages.getMessage(SOME_STATIC_VAR), object1
     * "there was a problem with {} and {}", object1, object2
     *
     * Result will be an array of strings, like:
     *
     * {"someKeyToMessageProps", " object1, object2"}
     * {"SOME_STATIC_VAR", " object1"}
     * {"SOME_STATIC_VAR"}
     * {"there was a problem with {} and {}", " object1, object2"}
     *
     * @param string to parse
     * @param fileText the full text of the file being scanned, in case we need to go retrieve the value of a static var
     * @param filePath param is passed to produce meaningful failure message only
     * @return two-element array: [0] the message key or literal format string, [1] the remaining parameters (may be null)
     */
    private String[] splitString(String string, String fileText, String filePath) {
        String copy = new String(string);
        copy = copy.replace("\\\"", ""); // replace any escaped quotes

        // extract the part past Messages.getMessage, if necessary:
        if (!copy.startsWith("\"")) {
            // get whatever is between the matched parens:
            Pattern extractStringInParen = Pattern.compile("Messages\\s*\\.\\s*getMessage(FromBundle)??\\s*\\(\\s*(.*)");
            Matcher matcher = extractStringInParen.matcher(copy);
            if (matcher.matches()) {
                copy = matcher.group(2);
            }
        }

        if (!copy.startsWith("\"")) {
            // it's likely a static var, not a hard-coded string, so split on the commas and be done with it; best effort
            StringTokenizer tokenizer = new StringTokenizer(copy, ",");
            String[] strings = new String[2];
            String staticVar = tokenizer.nextToken().trim();
            // go extract the real value of staticVar, which will be the key into the resource properties file
            Pattern extractStaticVarValuePattern = Pattern.compile(".*" + staticVar + "\\s*=\\s*\"(.*?)\"\\s*;.*");
            Matcher matcher = extractStaticVarValuePattern.matcher(fileText);
            if (matcher.matches()) {
                strings[0] = matcher.group(1);
            } else {
                fail("Could not find value of variable " + staticVar + " in " + filePath);
            }
            String restOfString = null;
            if (tokenizer.hasMoreTokens()) {
                restOfString = "";
                while (tokenizer.hasMoreTokens()) {
                    restOfString += "," + tokenizer.nextToken().trim();
                }
                restOfString = restOfString.substring(1);// skip first comma
            }
            strings[1] = restOfString;
            return strings;
        }

        // look for the sequence: closing quote followed by comma.
        // Byte-by-byte scan because the quoted string may itself contain commas/parens.
        ByteArrayInputStream bais = new ByteArrayInputStream(copy.getBytes());
        // NOTE: despite the name, outsideQuotedString is true while the scan is
        // between quoted segments (i.e. after a closing quote)
        boolean outsideQuotedString = false;
        int endHardStringCounter = 1;
        // skip past the first quote
        int ch = bais.read();
        // find the matched quote and end paren; best effort here
        int endParenCounter = 1;
        int parenDepth = 1;
        boolean hardStringDone = false;
        while ((ch = bais.read()) != -1) {
            if (ch == '"' && !outsideQuotedString)
                // closing quote: we are now outside the quoted text
                outsideQuotedString = true;
            else if ((ch == ',') && outsideQuotedString)
                // first unquoted comma terminates the hard-coded string portion
                hardStringDone = true;
            else if ((ch == ')') && outsideQuotedString && ((--parenDepth) == 0))
                // matched the paren that closes the original call
                break;
            else if ((ch == '(') && outsideQuotedString) {
                parenDepth++;
            }
            else if (ch == '"' && outsideQuotedString)
                // the quoted string continues, like:  "we have " + count + " apples"
                outsideQuotedString = false;
            endParenCounter++;
            if (!hardStringDone)
                endHardStringCounter++;
        }
        try {
            bais.close();
        } catch (IOException e) {
            // in-memory stream; close cannot meaningfully fail
        }

        String hardCodedString = copy.substring(1, endHardStringCounter-1).trim();
        // clean up, if necessary: strip any trailing quotes left by concatenated segments
        while (hardCodedString.endsWith("\""))
            hardCodedString = hardCodedString.substring(0, hardCodedString.length()-1);

        String restOfString = null;
        if (endHardStringCounter < copy.length()) {
            restOfString = copy.substring(endHardStringCounter, endParenCounter);
            restOfString = restOfString.substring(restOfString.indexOf(",")+1);  // skip the first comma
            restOfString = restOfString.trim();
        }
        return new String[]{hardCodedString, restOfString};
    }

    /**
     * Inspect the string.  Note the parens of the passed String parameter may
     * not be balanced.
     *
     * String will be something like either of the following:
     *
     * Messages.getMessage("someKeyToMessageProps"), object1, object2
     * "there was a problem with {} and {}", object1, object2
     *
     * filePath param is so we can print an informative failure message.
     * unusedProps is so we can delete key/value pairs as we encounter them in
     * source, so we can make sure there are no unnecessary key/value pairs in
     * the message file.
     */
    private void parseAndInspect(String string, boolean externalizationRequired, String fileText, String filePath, Properties unusedProps) {
        // expect a string with unmatched parens, but we don't care.  We just want to know if the messages file has
        // the string if Messages.getMessage is called, and if the number of {} in the string matches up with the num of params

        // clean up a bit
        string = string.trim();
        if (string.endsWith(")")) {
            string = string.substring(0, string.length() - 1);
            string = string.trim();
        }

        // a hard-coded literal passed to a non-debug logger call must be externalized
        if (!string.startsWith("Messages") && string.startsWith("\"") && externalizationRequired) {
            fail("Externalization is required for parameter " + "\"" + string + "\" statement in " + filePath);
        }

        // short circuit: message passed to logger is just a variable, like Exception.getMessage(), so there's nothing to check
        if (!string.startsWith("Messages") && !string.startsWith("\"")) {
            return;
        }

        String[] splitString;
        splitString = splitString(string, fileText, filePath);  // split between quoted part of the first param, and the rest

        if (splitString.length == 0) {
            // means we couldn't find the value of the static var used as the key into Messages.getMessage
            // error message already printed, nothing else to check
            return;
        }

        // chickenLips == number of {} / {n} placeholders in the resolved format string
        int chickenLips = 0;
        if (string.startsWith("Messages")) {
            chickenLips = messageStrings.getBraceCountByKey(splitString[0], true, filePath);
            if (chickenLips == -1) {
                // no key was found, error message already printed, nothing else to check
                return;
            }
            // key is confirmed in use; anything left in unusedProps at the end is dead
            unusedProps.remove(splitString[0]);
        } else if (string.startsWith("\"")) {
            chickenLips = messageStrings.getBraceCount(splitString[0], false);
        }

        // ok, there better be chickenLips many more tokens!
        int remainingParams = 0;
        if (splitString[1] != null) {
            StringTokenizer tokenizer = new StringTokenizer(splitString[1], ",");
            remainingParams = tokenizer.countTokens();
        }

        // SLF4J logger can take an extra exception param
        if (chickenLips == remainingParams-1) {
            // token count may be one greater than chickenlips, since messages may be something like:
            // logger.trace("abcd", new RuntimeException());
            // or:
            // logger.error(Messages.getMessage("saxParseException", type.getName()), e);
            //                        System.out.print("\nWARNING: Expected " + chickenLips + " parameters, but found " + tokenizer.countTokens() + (string.startsWith("Messages") ? " for key " : " for formatted string ") +
            //                        "\"" + splitString[0] + "\" in " + srcFile + ".  SLF4J allows an Exception as a parameter with no braces in the formatted message, but you should confirm this is ok.");
            return;
        }

        if (remainingParams != chickenLips) {
            fail("Expected " + chickenLips + " parameters, but found " + remainingParams +
                    (string.startsWith("Messages") ? " for key " : " for formatted string ") +
                    "\"" + splitString[0] + "\" in " + filePath);
        }
    }

    /**
     * getFilteredFileContents will filter out all comments in .java source
     * files and return the contents in a single line.  A single space
     * character replaces newlines.
     *
     * @param file source file to read
     * @return the file contents, comment-free, on one line
     */
    private static String getFilteredFileContents(File file) {
        String fileText = "";
        try {
            FileInputStream fis = new FileInputStream(file);
            BufferedReader br = new BufferedReader(new InputStreamReader(fis));
            String line = null;
            while((line = br.readLine()) != null) {
                // get rid of single-line comments
                int eol = line.indexOf("//");
                if (eol == -1) {
                    fileText += line;
                } else {
                    fileText += line.substring(0, eol);
                }
                fileText += " ";  // to be safe, since we're smashing the whole file down into one line
            }
            br.close();
            fis.close();
        } catch (IOException e) {
            fail(e.getMessage() + " while reading " + file.getAbsolutePath());
        }
        return fileText.replaceAll("/\\*.*?\\*/", "");  // get rid of comment blocks
    }

    // check all production code .java files for all calls to Logger.trace, error, warn, and info to ensure
    // that the formatted string is correct, and that the reference, if any, to resource.properties keys is correct.
    public void testMessages() throws IOException {
        // to find the Logger variable name:
        Pattern patternLoggerRef = Pattern.compile(".*?\\s+?Logger\\s+?([\\p{Alnum}|_]+).*");

        int progressCounter = 0;

        ArrayList<File> files = new ArrayList<File>();
        String path = getWorkspacePath();
        System.out.println("Collecting list of files to scan...");
        files.addAll(listFiles(new File(path), new JavaSrcFilenameFilter()));
        System.out.println("Checking " + files.size() + " files.");
        for (File file: files) {
            String fileText = getFilteredFileContents(file);
            Matcher matcher = patternLoggerRef.matcher(fileText);
            String loggerVariableName = null;

            // indicate some progress for IDE users
            System.out.print(".");
            progressCounter++;
            if(progressCounter % 10 == 0) {
                System.out.println(progressCounter);
            }

            if (matcher.matches()) {
                loggerVariableName = matcher.group(1);
            }

            // now that we know what the logger variable name is, we can inspect any calls made to its methods:
            // (we can't really use regex here to match balanced parentheses)
            ArrayList<Pattern> betweenLoggersPatterns = new ArrayList<Pattern>();
            if (loggerVariableName != null) {
                betweenLoggersPatterns.add(Pattern.compile("\\G.*?" + loggerVariableName + "\\s*?\\.\\s*?(info|trace|debug|error|warn)\\s*?\\((.*?);", Pattern.COMMENTS));
                betweenLoggersPatterns.add(patternForNoLogger);  // some patterns may get checked twice, but that's ok
            } else {
                betweenLoggersPatterns.add(patternForNoLogger);
            }

            for (Pattern betweenLoggersPattern: betweenLoggersPatterns.toArray(new Pattern[]{})) {
                Matcher betweenLoggersMatcher = betweenLoggersPattern.matcher(fileText);
                while (betweenLoggersMatcher.find()) {
                    // debug/trace level messages do not need to be externalized
                    boolean externalizationRequired = !betweenLoggersMatcher.group(1).equals("debug") && !betweenLoggersMatcher.group(1).equals("trace");
                    parseAndInspect(betweenLoggersMatcher.group(2), externalizationRequired, fileText, file.getAbsolutePath(), unusedProps);
                }
            }
        }

        // any keys never removed by parseAndInspect are unused in production source
        if (!unusedProps.isEmpty()) {
            Set<Object> keys = unusedProps.keySet();
            for (Object key : keys.toArray()) {
                System.err.println("key \"" + key + "\" is unused.");
            }
            fail("There are some unused key/value pairs in one or more of your properties message files.  See System.err for this test for the list of unused keys.");
        }

        System.out.println("Done.");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.sun.jini.test.spec.jeri.util;

import net.jini.jeri.BasicInvocationDispatcher;
import net.jini.jeri.InboundRequest;
import net.jini.jeri.ServerCapabilities;
import net.jini.core.constraint.InvocationConstraints;
import net.jini.core.constraint.MethodConstraints;

import java.lang.reflect.Method;
import java.rmi.Remote;
import java.rmi.server.ExportException;
import java.io.ObjectOutputStream;
import java.io.ObjectInputStream;
import java.io.IOException;
import java.util.logging.Logger;
import java.util.Collection;

/**
 * A fake BasicInvocationDispatcher implementation whose protected hook
 * methods can each be configured, via the corresponding "set" method, to
 * throw a caller-supplied exception instead of delegating to the superclass.
 *
 * When a configured exception is not an instance of a type the hook is
 * allowed to throw (and is not an unchecked throwable), the hook throws
 * AssertionError instead.
 */
public class FakeBasicInvocationDispatcher extends BasicInvocationDispatcher {

    Logger logger;
    Throwable unmarshalArgumentsException;
    Throwable unmarshalMethodException;
    Throwable marshalReturnException;
    Throwable marshalThrowException;
    Throwable createMarshalInputStreamException;
    Throwable createMarshalOutputStreamException;
    Throwable checkAccessException;
    Throwable invokeException;

    /**
     * Constructs a FakeBasicInvocationDispatcher.
     */
    public FakeBasicInvocationDispatcher(Collection methods,
        ServerCapabilities serverCaps, MethodConstraints serverConstraints,
        Class permClass, ClassLoader classLoader) throws ExportException
    {
        super(methods, serverCaps, serverConstraints, permClass, classLoader);
        logger = Logger.getLogger("com.sun.jini.qa.harness.test");
        logger.entering(getClass().getName(),"constructor");
    }

    /***
     *** The following methods configure the exceptions to throw from the
     *** various protected methods.  Mapping from these "set" methods to
     *** the corresponding protected method should be obvious.
     ***
     *** @param t the exception to throw
     ***/
    public void setCheckAccessException(Throwable t) {
        this.checkAccessException = t;
    }
    public void setCreateMarshalInputStreamException(Throwable t) {
        this.createMarshalInputStreamException = t;
    }
    public void setCreateMarshalOutputStreamException(Throwable t) {
        this.createMarshalOutputStreamException = t;
    }
    public void setInvokeException(Throwable t) {
        this.invokeException = t;
    }
    public void setUnmarshalMethodException(Throwable t) {
        this.unmarshalMethodException = t;
    }
    public void setUnmarshalArgumentsException(Throwable t) {
        this.unmarshalArgumentsException = t;
    }
    public void setMarshalReturnException(Throwable t) {
        this.marshalReturnException = t;
    }
    public void setMarshalThrowException(Throwable t) {
        this.marshalThrowException = t;
    }

    /***
     *** Overridden protected methods.  Each one throws the exception
     *** configured with the corresponding "set" method above; when no
     *** exception is configured, it simply delegates to the superclass.
     ***
     *** @throws configured exception if not null
     *** @throws AssertionError if configured exception was set but
     ***         is not an instance of an exception thrown by the method
     ***/
    public void checkAccess(Remote impl, Method method,
        InvocationConstraints constraints, Collection context)
    {
        logger.entering(getClass().getName(),"checkAccess");
        if (checkAccessException == null) {
            super.checkAccess(impl, method, constraints, context);
            return;
        }
        rethrowUnchecked(checkAccessException);
        throw new AssertionError();
    }

    public ObjectInputStream createMarshalInputStream(Object impl,
        InboundRequest request, boolean integrity, Collection context)
        throws IOException
    {
        logger.entering(getClass().getName(),"createMarshalInputStream");
        if (createMarshalInputStreamException == null) {
            return super.createMarshalInputStream(
                impl, request, integrity, context);
        }
        rethrowIOException(createMarshalInputStreamException);
        throw new AssertionError();
    }

    public ObjectOutputStream createMarshalOutputStream(Object impl,
        Method method, InboundRequest request, Collection context)
        throws IOException
    {
        logger.entering(getClass().getName(),"createMarshalOutputStream");
        if (createMarshalOutputStreamException == null) {
            return super.createMarshalOutputStream(
                impl, method, request, context);
        }
        rethrowIOException(createMarshalOutputStreamException);
        throw new AssertionError();
    }

    public Object invoke(Remote impl, Method method, Object[] args,
        Collection context) throws Throwable
    {
        logger.entering(getClass().getName(),"invoke");
        // invoke is declared to throw Throwable, so anything goes
        Throwable planted = invokeException;
        if (planted != null) {
            throw planted;
        }
        return super.invoke(impl, method, args, context);
    }

    public Object[] unmarshalArguments(Remote impl, Method method,
        ObjectInputStream in, Collection c)
        throws IOException, ClassNotFoundException
    {
        logger.entering(getClass().getName(),"unmarshalArguments");
        if (unmarshalArgumentsException == null) {
            return super.unmarshalArguments(impl, method, in, c);
        }
        rethrowIOException(unmarshalArgumentsException);
        rethrowClassNotFound(unmarshalArgumentsException);
        throw new AssertionError();
    }

    public Method unmarshalMethod(Remote impl, ObjectInputStream in,
        Collection c)
        throws IOException, ClassNotFoundException, NoSuchMethodException
    {
        logger.entering(getClass().getName(),"unmarshalMethod");
        if (unmarshalMethodException == null) {
            return super.unmarshalMethod(impl, in, c);
        }
        rethrowIOException(unmarshalMethodException);
        rethrowClassNotFound(unmarshalMethodException);
        rethrowNoSuchMethod(unmarshalMethodException);
        throw new AssertionError();
    }

    public void marshalReturn(Remote impl, Method method, Object returnValue,
        ObjectOutputStream out, Collection c) throws IOException
    {
        logger.entering(getClass().getName(),"marshalReturn");
        if (marshalReturnException == null) {
            super.marshalReturn(impl, method, returnValue, out, c);
            return;
        }
        rethrowIOException(marshalReturnException);
        throw new AssertionError();
    }

    public void marshalThrow(Remote impl, Method method, Throwable throwable,
        ObjectOutputStream out, Collection c) throws IOException
    {
        logger.entering(getClass().getName(),"marshalThrow");
        if (marshalThrowException == null) {
            super.marshalThrow(impl, method, throwable, out, c);
            return;
        }
        rethrowIOException(marshalThrowException);
        throw new AssertionError();
    }

    /** Exposes the protected getClassLoader method for test use. */
    public ClassLoader getClassLoader0() {
        return super.getClassLoader();
    }

    // **********************************************
    // private helpers: each rethrows t when it is an instance of the
    // named checked type, otherwise falls through to the unchecked check
    // and returns normally when nothing matched

    private void rethrowClassNotFound(Throwable t)
        throws ClassNotFoundException
    {
        if (t instanceof ClassNotFoundException) {
            throw (ClassNotFoundException) t;
        }
        rethrowUnchecked(t);
    }

    private void rethrowNoSuchMethod(Throwable t)
        throws NoSuchMethodException
    {
        if (t instanceof NoSuchMethodException) {
            throw (NoSuchMethodException) t;
        }
        rethrowUnchecked(t);
    }

    private void rethrowIOException(Throwable t) throws IOException {
        if (t instanceof IOException) {
            throw (IOException) t;
        }
        rethrowUnchecked(t);
    }

    private void rethrowUnchecked(Throwable t)
        throws RuntimeException, Error
    {
        if (t instanceof RuntimeException) {
            throw (RuntimeException) t;
        }
        if (t instanceof Error) {
            throw (Error) t;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.tribes.group.interceptors;

import java.util.ArrayList;
import java.util.Arrays;

import org.apache.catalina.tribes.Channel;
import org.apache.catalina.tribes.ChannelException;
import org.apache.catalina.tribes.ChannelInterceptor;
import org.apache.catalina.tribes.ChannelMessage;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.tribes.group.AbsoluteOrder;
import org.apache.catalina.tribes.group.ChannelInterceptorBase;
import org.apache.catalina.tribes.io.ChannelData;
import org.apache.catalina.tribes.io.XByteBuffer;
import org.apache.catalina.tribes.util.StringManager;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * Interceptor that adds a statically configured membership to the dynamic
 * membership discovered lower in the interceptor stack, and that broadcasts
 * start/shutdown notifications for the local static member.
 *
 * All access to the static member list is synchronized on {@code members};
 * previously writes were synchronized but several read paths were not,
 * which could publish a torn view of the list to channel threads.
 */
public class StaticMembershipInterceptor extends ChannelInterceptorBase
        implements StaticMembershipInterceptorMBean {

    private static final Log log = LogFactory.getLog(StaticMembershipInterceptor.class);
    protected static final StringManager sm =
            StringManager.getManager(StaticMembershipInterceptor.class);

    // ASCII bytes for "Local StaticMember Notification Data"
    protected static final byte[] MEMBER_START = new byte[] {
        76, 111, 99, 97, 108, 32, 83, 116, 97, 116, 105, 99, 77, 101, 109, 98,
        101, 114, 32, 78, 111, 116, 105, 102, 105, 99, 97, 116, 105, 111, 110,
        32, 68, 97, 116, 97};

    // ASCII bytes for "Local StaticMember Shutdown Data"
    protected static final byte[] MEMBER_STOP = new byte[] {
        76, 111, 99, 97, 108, 32, 83, 116, 97, 116, 105, 99, 77, 101, 109, 98,
        101, 114, 32, 83, 104, 117, 116, 100, 111, 119, 110, 32, 68, 97, 116,
        97};

    /** Statically configured members; guarded by itself for ALL access. */
    protected final ArrayList<Member> members = new ArrayList<>();
    /** The member that represents this node, if statically configured. */
    protected Member localMember = null;

    public StaticMembershipInterceptor() {
        super();
    }

    /**
     * Add a statically configured member. No-op if already present.
     *
     * @param member the member to add
     */
    public void addStaticMember(Member member) {
        synchronized (members) {
            if (!members.contains(member)) {
                members.add(member);
            }
        }
    }

    /**
     * Remove a statically configured member, if present.
     *
     * @param member the member to remove
     */
    public void removeStaticMember(Member member) {
        synchronized (members) {
            // List.remove is already a no-op when absent; no contains() needed
            members.remove(member);
        }
    }

    /**
     * Set the member representing this node and flag it as local.
     *
     * @param member the local member
     */
    public void setLocalMember(Member member) {
        this.localMember = member;
        localMember.setLocal(true);
    }

    /**
     * Intercepts the static-member start/shutdown control messages; all other
     * messages are passed up the stack unchanged.
     */
    @Override
    public void messageReceived(ChannelMessage msg) {
        if (msg.getMessage().getLength() == MEMBER_START.length &&
                Arrays.equals(MEMBER_START, msg.getMessage().getBytes())) {
            // receive member start
            Member member = getMember(msg.getAddress());
            if (member != null) {
                super.memberAdded(member);
            }
        } else if (msg.getMessage().getLength() == MEMBER_STOP.length &&
                Arrays.equals(MEMBER_STOP, msg.getMessage().getBytes())) {
            // receive member shutdown
            Member member = getMember(msg.getAddress());
            if (member != null) {
                try {
                    member.setCommand(Member.SHUTDOWN_PAYLOAD);
                    super.memberDisappeared(member);
                } finally {
                    member.setCommand(new byte[0]);
                }
            }
        } else {
            super.messageReceived(msg);
        }
    }

    /**
     * @return true if there are dynamic or static members
     */
    @Override
    public boolean hasMembers() {
        if (super.hasMembers()) {
            return true;
        }
        synchronized (members) {
            return !members.isEmpty();
        }
    }

    /**
     * Get all current cluster members: the dynamic membership plus the static
     * list, in absolute order.
     *
     * @return all members or empty array
     */
    @Override
    public Member[] getMembers() {
        synchronized (members) {
            Member[] others = super.getMembers();
            if (members.isEmpty()) {
                return others;
            }
            Member[] result = new Member[members.size() + others.length];
            System.arraycopy(others, 0, result, 0, others.length);
            int idx = others.length;
            for (Member member : members) {
                result[idx++] = member;
            }
            AbsoluteOrder.absoluteOrder(result);
            return result;
        }
    }

    /**
     * Look the member up in the static list first, then in the dynamic
     * membership.
     *
     * @param mbr Member
     * @return Member
     */
    @Override
    public Member getMember(Member mbr) {
        synchronized (members) {
            int idx = members.indexOf(mbr);
            if (idx >= 0) {
                return members.get(idx);
            }
        }
        return super.getMember(mbr);
    }

    /**
     * Return the member that represents this node.
     *
     * @return Member
     */
    @Override
    public Member getLocalMember(boolean incAlive) {
        if (this.localMember != null) {
            return localMember;
        }
        return super.getLocalMember(incAlive);
    }

    /**
     * {@inheritDoc}
     * <p>
     * Sends notifications upwards.
     */
    @Override
    public void start(int svc) throws ChannelException {
        if ((Channel.SND_RX_SEQ & svc) == Channel.SND_RX_SEQ) {
            super.start(Channel.SND_RX_SEQ);
        }
        if ((Channel.SND_TX_SEQ & svc) == Channel.SND_TX_SEQ) {
            super.start(Channel.SND_TX_SEQ);
        }
        final ChannelInterceptorBase base = this;
        // snapshot under the lock so the notification threads iterate a
        // consistent view even if the static list is mutated concurrently
        final ArrayList<Member> snapshot;
        synchronized (members) {
            snapshot = new ArrayList<>(members);
        }
        for (final Member member : snapshot) {
            Thread t = new Thread() {
                @Override
                public void run() {
                    base.memberAdded(member);
                    if (getfirstInterceptor().getMember(member) != null) {
                        sendLocalMember(new Member[]{member});
                    }
                }
            };
            t.start();
        }
        super.start(svc & (~Channel.SND_RX_SEQ) & (~Channel.SND_TX_SEQ));

        // check required interceptors
        TcpFailureDetector failureDetector = null;
        TcpPingInterceptor pingInterceptor = null;
        ChannelInterceptor prev = getPrevious();
        while (prev != null) {
            if (prev instanceof TcpFailureDetector) {
                failureDetector = (TcpFailureDetector) prev;
            }
            if (prev instanceof TcpPingInterceptor) {
                pingInterceptor = (TcpPingInterceptor) prev;
            }
            prev = prev.getPrevious();
        }
        if (failureDetector == null) {
            log.warn(sm.getString("staticMembershipInterceptor.no.failureDetector"));
        }
        if (pingInterceptor == null) {
            log.warn(sm.getString("staticMembershipInterceptor.no.pingInterceptor"));
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Sends local member shutdown.
     */
    @Override
    public void stop(int svc) throws ChannelException {
        // Sends local member shutdown.
        // (renamed local: previously shadowed the 'members' field)
        Member[] mbrs = getfirstInterceptor().getMembers();
        sendShutdown(mbrs);
        super.stop(svc);
    }

    /** Broadcast a start notification for the given members; best effort. */
    protected void sendLocalMember(Member[] members) {
        try {
            sendMemberMessage(members, MEMBER_START);
        } catch (ChannelException cx) {
            log.warn(sm.getString("staticMembershipInterceptor.sendLocalMember.failed"), cx);
        }
    }

    /** Broadcast a shutdown notification for the given members; best effort. */
    protected void sendShutdown(Member[] members) {
        try {
            sendMemberMessage(members, MEMBER_STOP);
        } catch (ChannelException cx) {
            log.warn(sm.getString("staticMembershipInterceptor.sendShutdown.failed"), cx);
        }
    }

    /** @return the interceptor at the bottom of the stack (last non-null previous). */
    protected ChannelInterceptor getfirstInterceptor() {
        ChannelInterceptor result = null;
        ChannelInterceptor now = this;
        do {
            result = now;
            now = now.getPrevious();
        } while (now.getPrevious() != null);
        return result;
    }

    /**
     * Send a raw control payload to the given members.
     *
     * @param members destinations; no-op when null or empty
     * @param message the payload bytes (MEMBER_START or MEMBER_STOP)
     * @throws ChannelException on send failure
     */
    protected void sendMemberMessage(Member[] members, byte[] message)
            throws ChannelException {
        if (members == null || members.length == 0) {
            return;
        }
        ChannelData data = new ChannelData(true);
        data.setAddress(getLocalMember(false));
        data.setTimestamp(System.currentTimeMillis());
        data.setOptions(getOptionFlag());
        data.setMessage(new XByteBuffer(message, false));
        super.sendMessage(members, data, null);
    }
}
/* * Phys2D - a 2D physics engine based on the work of Erin Catto. The * original source remains: * * Copyright (c) 2006 Erin Catto http://www.gphysics.com * * This source is provided under the terms of the BSD License. * * Copyright (c) 2006, Phys2D * All rights reserved. * * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of the Phys2D/New Dawn Software nor the names of * its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY * OF SUCH DAMAGE... */ package net.phys2d.math; import net.phys2d.math.Vector2f; /** * Simple utility wrapping up a bunch of math operations so that * the rest of the code doesn't have to look so cluttered. 
*
 * @author Kevin Glass
 */
public final strictfp class MathUtil {

    /** Utility class — never instantiated. */
    private MathUtil() {
    }

    /**
     * Produces a scaled copy of a vector.
     *
     * @param a     the vector to copy and scale
     * @param scale the scale factor to apply
     * @return a newly created, scaled vector (the input is not modified)
     */
    public static Vector2f scale(ROVector2f a, float scale) {
        Vector2f scaled = new Vector2f(a);
        scaled.scale(scale);
        return scaled;
    }

    /**
     * Computes the difference of two vectors.
     *
     * @param a the vector to subtract from
     * @param b the vector to subtract
     * @return a newly created vector holding a - b
     */
    public static Vector2f sub(ROVector2f a, ROVector2f b) {
        Vector2f difference = new Vector2f(a);
        difference.sub(b);
        return difference;
    }

    /**
     * Determines the sign of a value.
     *
     * @param x the value to check
     * @return -1.0f for negative values, 1.0f otherwise (zero maps to 1.0f)
     */
    public static float sign(float x) {
        if (x < 0.0f) {
            return -1.0f;
        }
        return 1.0f;
    }

    /**
     * Multiplies a 2x2 matrix by a column vector.
     *
     * @param A the matrix
     * @param v the vector to transform
     * @return a newly created vector holding A * v
     */
    public static Vector2f mul(Matrix2f A, ROVector2f v) {
        float resultX = A.col1.x * v.getX() + A.col2.x * v.getY();
        float resultY = A.col1.y * v.getX() + A.col2.y * v.getY();
        return new Vector2f(resultX, resultY);
    }

    /**
     * Multiplies two 2x2 matrices.
     *
     * @param A the left-hand matrix
     * @param B the right-hand matrix
     * @return a newly created matrix holding A * B
     */
    public static Matrix2f mul(Matrix2f A, Matrix2f B) {
        Vector2f firstColumn = mul(A, B.col1);
        Vector2f secondColumn = mul(A, B.col2);
        return new Matrix2f(firstColumn, secondColumn);
    }

    /**
     * Builds the element-wise absolute value of a matrix.
     *
     * @param A the matrix to make absolute
     * @return a newly created matrix with every component non-negative
     */
    public static Matrix2f abs(Matrix2f A) {
        return new Matrix2f(abs(A.col1), abs(A.col2));
    }

    /**
     * Builds the component-wise absolute value of a vector.
     *
     * @param a the vector to make absolute
     * @return a newly created vector with non-negative components
     */
    public static Vector2f abs(Vector2f a) {
        return new Vector2f(Math.abs(a.x), Math.abs(a.y));
    }

    /**
     * Adds two 2x2 matrices.
     *
     * @param A the first matrix
     * @param B the second matrix
     * @return a newly created matrix holding A + B
     */
    public static Matrix2f add(Matrix2f A, Matrix2f B) {
        Vector2f firstColumn = new Vector2f(A.col1);
        Vector2f secondColumn = new Vector2f(A.col2);
        firstColumn.add(B.col1);
        secondColumn.add(B.col2);
        return new Matrix2f(firstColumn, secondColumn);
    }

    /**
     * Computes the 2D cross product (the z-component of the 3D cross product).
     *
     * @param a the first vector
     * @param b the second vector
     * @return the scalar cross product of the two vectors
     */
    public static float cross(Vector2f a, Vector2f b) {
        return a.x * b.y - a.y * b.x;
    }

    /**
     * Computes the cross product of a scalar and a vector (s x a).
     *
     * @param s the scalar
     * @param a the vector
     * @return a newly created resultant vector
     */
    public static Vector2f cross(float s, Vector2f a) {
        return new Vector2f(-s * a.y, s * a.x);
    }

    /**
     * Computes the cross product of a vector and a scalar (a x s).
     *
     * @param a the vector
     * @param s the scalar
     * @return a newly created resultant vector
     */
    public static Vector2f cross(Vector2f a, float s) {
        return new Vector2f(s * a.y, -s * a.x);
    }

    /**
     * Clamps a value into the range [low, high].
     *
     * @param a    the original value
     * @param low  the lower bound
     * @param high the upper bound
     * @return the clamped value
     */
    public static float clamp(float a, float low, float high) {
        float upperBounded = Math.min(a, high);
        return Math.max(low, upperBounded);
    }

    /**
     * Computes the normalised normal of the edge from x to y. When standing on
     * x facing y, the normal points to the left.
     *
     * TODO: move this function somewhere else?
     *
     * @param x starting point of the edge
     * @param y end point of the edge
     * @return a newly created, normalised normal vector
     */
    public static Vector2f getNormal(ROVector2f x, ROVector2f y) {
        Vector2f edge = new Vector2f(y);
        edge.sub(x);
        Vector2f normal = new Vector2f(edge.y, -edge.x);
        normal.normalise();
        return normal;
    }

//	public static Vector2f intersect(Vector2f startA, Vector2f endA, Vector2f startB, Vector2f endB) {
//		float d = (endB.y - startB.y) * (endA.x - startA.x) - (endB.x - startB.x) * (endA.y - startA.y);
//
//		if ( d == 0 ) // parallel lines
//			return null;
//
//		float uA = (endB.x - startB.x) * (startA.y - startB.y) - (endB.y - startB.y) * (startA.x - startB.x);
//		uA /= d;
//		float uB = (endA.x - startA.x) * (startA.y - startB.y) - (endA.y - startA.y) * (startA.x - startB.x);
//		uB /= d;
//
//		if ( uA < 0 || uA > 1 || uB < 0 || uB > 1 )
//			return null; // intersection point isn't between the start and endpoints
//
//		return new Vector2f(
//				startA.x + uA * (endA.x - startA.x),
//				startA.y + uA * (endA.y - startA.y));
//	}
}
/* * Copyright 1999-2101 Alibaba Group. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mimosaframework.core.json.parser; import java.io.Closeable; import java.io.File; import java.io.Serializable; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.lang.reflect.WildcardType; import java.math.BigDecimal; import java.math.BigInteger; import java.net.Inet4Address; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URI; import java.net.URL; import java.nio.charset.Charset; import java.security.AccessControlException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Currency; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.TimeZone; import java.util.TreeMap; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicIntegerArray; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLongArray; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; import org.mimosaframework.core.json.ModelArray; import org.mimosaframework.core.json.ModelException; import org.mimosaframework.core.json.ModelObject; import org.mimosaframework.core.json.ModelPath; import org.mimosaframework.core.json.annotation.JSONField; import org.mimosaframework.core.json.annotation.JSONType; import org.mimosaframework.core.json.parser.deserializer.ASMDeserializerFactory; import org.mimosaframework.core.json.parser.deserializer.ArrayListTypeFieldDeserializer; import org.mimosaframework.core.json.parser.deserializer.AutowiredObjectDeserializer; import org.mimosaframework.core.json.parser.deserializer.DefaultFieldDeserializer; import org.mimosaframework.core.json.parser.deserializer.EnumDeserializer; import org.mimosaframework.core.json.parser.deserializer.FieldDeserializer; import org.mimosaframework.core.json.parser.deserializer.JavaBeanDeserializer; import org.mimosaframework.core.json.parser.deserializer.JavaObjectDeserializer; import org.mimosaframework.core.json.parser.deserializer.Jdk8DateCodec; import org.mimosaframework.core.json.parser.deserializer.MapDeserializer; import org.mimosaframework.core.json.parser.deserializer.NumberDeserializer; import org.mimosaframework.core.json.parser.deserializer.ObjectDeserializer; import org.mimosaframework.core.json.parser.deserializer.OptionalCodec; import org.mimosaframework.core.json.parser.deserializer.SqlDateDeserializer; import org.mimosaframework.core.json.parser.deserializer.StackTraceElementDeserializer; import org.mimosaframework.core.json.parser.deserializer.ThrowableDeserializer; import org.mimosaframework.core.json.parser.deserializer.TimeDeserializer; import org.mimosaframework.core.json.serializer.AtomicCodec; import 
org.mimosaframework.core.json.serializer.AwtCodec;
import org.mimosaframework.core.json.serializer.BigDecimalCodec;
import org.mimosaframework.core.json.serializer.BigIntegerCodec;
import org.mimosaframework.core.json.serializer.BooleanCodec;
import org.mimosaframework.core.json.serializer.CalendarCodec;
import org.mimosaframework.core.json.serializer.CharArrayCodec;
import org.mimosaframework.core.json.serializer.CharacterCodec;
import org.mimosaframework.core.json.serializer.CollectionCodec;
import org.mimosaframework.core.json.serializer.DateCodec;
import org.mimosaframework.core.json.serializer.FloatCodec;
import org.mimosaframework.core.json.serializer.IntegerCodec;
import org.mimosaframework.core.json.serializer.LongCodec;
import org.mimosaframework.core.json.serializer.MiscCodec;
import org.mimosaframework.core.json.serializer.ObjectArrayCodec;
import org.mimosaframework.core.json.serializer.ReferenceCodec;
import org.mimosaframework.core.json.serializer.StringCodec;
import org.mimosaframework.core.json.util.ASMClassLoader;
import org.mimosaframework.core.json.util.ASMUtils;
import org.mimosaframework.core.json.util.FieldInfo;
import org.mimosaframework.core.json.util.IdentityHashMap;
import org.mimosaframework.core.json.util.JavaBeanInfo;
import org.mimosaframework.core.json.util.ServiceLoader;

/**
 * Registry that maps Java {@link Type}s to {@link ObjectDeserializer}s, and
 * creates JavaBean deserializers (ASM-generated where possible) on demand.
 * Also enforces a deny-list of class-name prefixes that may never be
 * deserialized.
 *
 * @author wenshao[szujobs@hotmail.com]
 */
public class ParserConfig {

    /** System property whose comma-separated value extends the deny list. */
    public final static String DENY_PROPERTY = "fastjson.parser.deny";

    /** Returns the shared, process-wide configuration instance. */
    public static ParserConfig getGlobalInstance() {
        return global;
    }

    public static ParserConfig global = new ParserConfig();

    // Type -> deserializer registry; identity-keyed for speed.
    private final IdentityHashMap<Type, ObjectDeserializer> derializers = new IdentityHashMap<Type, ObjectDeserializer>();

    // ASM bytecode generation is off on Android (and when no factory could be built).
    private boolean asmEnable = !ASMUtils.IS_ANDROID;

    public final SymbolTable symbolTable = new SymbolTable(4096);

    protected ClassLoader defaultClassLoader;

    protected ASMDeserializerFactory asmFactory;

    // Sticky failure flags: once awt/jdk8 class registration throws, never retry.
    private static boolean awtError = false;
    private static boolean jdk8Error = false;

    // Class-name prefixes rejected by getDeserializer (security deny list).
    private String[] denyList = new String[] { "java.lang.Thread" };

    public ParserConfig(){
        this(null, null);
    }

    public ParserConfig(ClassLoader parentClassLoader){
        this(null, parentClassLoader);
    }

    public ParserConfig(ASMDeserializerFactory asmFactory){
        this(asmFactory, null);
    }

    /**
     * Shared constructor: optionally builds an ASM factory, then pre-registers
     * deserializers/codecs for the common JDK and model types.
     */
    private ParserConfig(ASMDeserializerFactory asmFactory, ClassLoader parentClassLoader){
        if (asmFactory == null && !ASMUtils.IS_ANDROID) {
            try {
                if (parentClassLoader == null) {
                    asmFactory = new ASMDeserializerFactory(new ASMClassLoader());
                } else {
                    asmFactory = new ASMDeserializerFactory(parentClassLoader);
                }
            } catch (ExceptionInInitializerError error) {
                // skip — fall back to reflection-based deserialization
            } catch (AccessControlException error) {
                // skip — security manager forbids class-loader creation
            } catch (NoClassDefFoundError error) {
                // skip — ASM classes not on the classpath
            }
        }
        this.asmFactory = asmFactory;

        if (asmFactory == null) {
            asmEnable = false;
        }

        // Date/time types.
        derializers.put(SimpleDateFormat.class, MiscCodec.instance);
        derializers.put(java.sql.Timestamp.class, SqlDateDeserializer.instance_timestamp);
        derializers.put(java.sql.Date.class, SqlDateDeserializer.instance);
        derializers.put(java.sql.Time.class, TimeDeserializer.instance);
        derializers.put(java.util.Date.class, DateCodec.instance);
        derializers.put(Calendar.class, CalendarCodec.instance);

        // Model and collection/map types.
        derializers.put(ModelObject.class, MapDeserializer.instance);
        derializers.put(ModelArray.class, CollectionCodec.instance);
        derializers.put(Map.class, MapDeserializer.instance);
        derializers.put(HashMap.class, MapDeserializer.instance);
        derializers.put(LinkedHashMap.class, MapDeserializer.instance);
        derializers.put(TreeMap.class, MapDeserializer.instance);
        derializers.put(ConcurrentMap.class, MapDeserializer.instance);
        derializers.put(ConcurrentHashMap.class, MapDeserializer.instance);
        derializers.put(Collection.class, CollectionCodec.instance);
        derializers.put(List.class, CollectionCodec.instance);
        derializers.put(ArrayList.class, CollectionCodec.instance);
        derializers.put(Object.class, JavaObjectDeserializer.instance);

        // Strings and primitives (plus their wrappers).
        derializers.put(String.class, StringCodec.instance);
        derializers.put(StringBuffer.class, StringCodec.instance);
        derializers.put(StringBuilder.class, StringCodec.instance);
        derializers.put(char.class, CharacterCodec.instance);
        derializers.put(Character.class, CharacterCodec.instance);
        derializers.put(byte.class, NumberDeserializer.instance);
        derializers.put(Byte.class, NumberDeserializer.instance);
        derializers.put(short.class, NumberDeserializer.instance);
        derializers.put(Short.class, NumberDeserializer.instance);
        derializers.put(int.class, IntegerCodec.instance);
        derializers.put(Integer.class, IntegerCodec.instance);
        derializers.put(long.class, LongCodec.instance);
        derializers.put(Long.class, LongCodec.instance);
        derializers.put(BigInteger.class, BigIntegerCodec.instance);
        derializers.put(BigDecimal.class, BigDecimalCodec.instance);
        derializers.put(float.class, FloatCodec.instance);
        derializers.put(Float.class, FloatCodec.instance);
        derializers.put(double.class, NumberDeserializer.instance);
        derializers.put(Double.class, NumberDeserializer.instance);
        derializers.put(boolean.class, BooleanCodec.instance);
        derializers.put(Boolean.class, BooleanCodec.instance);
        derializers.put(Class.class, MiscCodec.instance);
        derializers.put(char[].class, new CharArrayCodec());

        // Atomics and references.
        derializers.put(AtomicBoolean.class, BooleanCodec.instance);
        derializers.put(AtomicInteger.class, IntegerCodec.instance);
        derializers.put(AtomicLong.class, LongCodec.instance);
        derializers.put(AtomicReference.class, ReferenceCodec.instance);
        derializers.put(WeakReference.class, ReferenceCodec.instance);
        derializers.put(SoftReference.class, ReferenceCodec.instance);

        // Misc JDK value types.
        derializers.put(UUID.class, MiscCodec.instance);
        derializers.put(TimeZone.class, MiscCodec.instance);
        derializers.put(Locale.class, MiscCodec.instance);
        derializers.put(Currency.class, MiscCodec.instance);
        derializers.put(InetAddress.class, MiscCodec.instance);
        derializers.put(Inet4Address.class, MiscCodec.instance);
        derializers.put(Inet6Address.class, MiscCodec.instance);
        derializers.put(InetSocketAddress.class, MiscCodec.instance);
        derializers.put(File.class, MiscCodec.instance);
        derializers.put(URI.class, MiscCodec.instance);
        derializers.put(URL.class, MiscCodec.instance);
        derializers.put(Pattern.class, MiscCodec.instance);
        derializers.put(Charset.class, MiscCodec.instance);
        derializers.put(ModelPath.class, MiscCodec.instance);
        derializers.put(Number.class, NumberDeserializer.instance);
        derializers.put(AtomicIntegerArray.class, AtomicCodec.instance);
        derializers.put(AtomicLongArray.class, AtomicCodec.instance);
        derializers.put(StackTraceElement.class, StackTraceElementDeserializer.instance);

        // Broad marker interfaces fall back to the generic object deserializer.
        derializers.put(Serializable.class, JavaObjectDeserializer.instance);
        derializers.put(Cloneable.class, JavaObjectDeserializer.instance);
        derializers.put(Comparable.class, JavaObjectDeserializer.instance);
        derializers.put(Closeable.class, JavaObjectDeserializer.instance);

        addDeny("java.lang.Thread");
        configFromPropety(System.getProperties());
    }

    /**
     * Extends the deny list from the {@link #DENY_PROPERTY} system property
     * (comma-separated class-name prefixes).
     *
     * NOTE(review): the method name contains a typo ("Propety") but is public
     * API — do not rename without a deprecation path.
     */
    public void configFromPropety(Properties properties) {
        String property = properties.getProperty(DENY_PROPERTY);
        if (property != null && property.length() > 0) {
            String[] items = property.split(",");
            for (int i = 0; i < items.length; ++i) {
                String item = items[i];
                this.addDeny(item);
            }
        }
    }

    public boolean isAsmEnable() {
        return asmEnable;
    }

    public void setAsmEnable(boolean asmEnable) {
        this.asmEnable = asmEnable;
    }

    /** Exposes the live registry (mutable — callers can register directly). */
    public IdentityHashMap<Type, ObjectDeserializer> getDerializers() {
        return derializers;
    }

    /**
     * Resolves a deserializer for an arbitrary {@link Type}, unwrapping
     * parameterized types down to their raw class.
     */
    public ObjectDeserializer getDeserializer(Type type) {
        ObjectDeserializer derializer = this.derializers.get(type);
        if (derializer != null) {
            return derializer;
        }

        if (type instanceof Class<?>) {
            return getDeserializer((Class<?>) type, type);
        }

        if (type instanceof ParameterizedType) {
            Type rawType = ((ParameterizedType) type).getRawType();
            if (rawType instanceof Class<?>) {
                return getDeserializer((Class<?>) rawType, type);
            } else {
                return getDeserializer(rawType);
            }
        }

        return JavaObjectDeserializer.instance;
    }

    /**
     * Resolves (and caches) a deserializer for the given class/type pair,
     * consulting — in order — the registry, @JSONType mappings, the deny list,
     * lazily-registered awt/jdk8/Optional codecs, ServiceLoader-provided
     * deserializers, and finally structural fallbacks (enum, array,
     * collection, map, throwable, JavaBean).
     *
     * @throws ModelException if the class name matches a deny-list prefix
     */
    public ObjectDeserializer getDeserializer(Class<?> clazz, Type type) {
        ObjectDeserializer derializer = derializers.get(type);
        if (derializer != null) {
            return derializer;
        }

        if (type == null) {
            type = clazz;
        }

        // Second lookup: only differs from the first when type was null above.
        derializer = derializers.get(type);
        if (derializer != null) {
            return derializer;
        }

        {
            JSONType annotation = clazz.getAnnotation(JSONType.class);
            if (annotation != null) {
                Class<?> mappingTo = annotation.mappingTo();
                if (mappingTo != Void.class) {
                    // @JSONType(mappingTo=...) redirects to another class.
                    return getDeserializer(mappingTo, mappingTo);
                }
            }
        }

        if (type instanceof WildcardType || type instanceof TypeVariable || type instanceof ParameterizedType) {
            derializer = derializers.get(clazz);
        }

        if (derializer != null) {
            return derializer;
        }

        // Security gate: reject any class whose (normalized) name starts with a
        // denied prefix. Note the '$' -> '.' replacement runs inside the loop,
        // so inner-class names are normalized before the prefix check.
        String className = clazz.getName();
        for (int i = 0; i < denyList.length; ++i) {
            String deny = denyList[i];
            className = className.replace('$', '.');
            if (className.startsWith(deny)) {
                throw new ModelException("parser deny : " + className);
            }
        }

        // Lazily register the java.awt codecs the first time an awt type shows up.
        if (className.startsWith("java.awt.") //
            && AwtCodec.support(clazz)) {
            if (!awtError) {
                try {
                    derializers.put(Class.forName("java.awt.Point"), AwtCodec.instance);
                    derializers.put(Class.forName("java.awt.Font"), AwtCodec.instance);
                    derializers.put(Class.forName("java.awt.Rectangle"), AwtCodec.instance);
                    derializers.put(Class.forName("java.awt.Color"), AwtCodec.instance);
                } catch (Throwable e) {
                    // skip — awt not available; never retry
                    awtError = true;
                }

                derializer = AwtCodec.instance;
            }
        }

        // Lazily register java.time and java.util.Optional codecs (JDK 8+ only).
        if (!jdk8Error) {
            try {
                if (className.startsWith("java.time.")) {
                    derializers.put(Class.forName("java.time.LocalDateTime"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.LocalDate"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.LocalTime"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.ZonedDateTime"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.OffsetDateTime"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.OffsetTime"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.ZoneOffset"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.ZoneRegion"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.ZoneId"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.Period"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.Duration"), Jdk8DateCodec.instance);
                    derializers.put(Class.forName("java.time.Instant"), Jdk8DateCodec.instance);

                    derializer = derializers.get(clazz);
                } else if (className.startsWith("java.util.Optional")) {
                    derializers.put(Class.forName("java.util.Optional"), OptionalCodec.instance);
                    derializers.put(Class.forName("java.util.OptionalDouble"), OptionalCodec.instance);
                    derializers.put(Class.forName("java.util.OptionalInt"), OptionalCodec.instance);
                    derializers.put(Class.forName("java.util.OptionalLong"), OptionalCodec.instance);

                    derializer = derializers.get(clazz);
                }
            } catch (Throwable e) {
                // skip — pre-JDK8 runtime; never retry
                jdk8Error = true;
            }
        }

        if (className.equals("java.nio.file.Path")) {
            derializers.put(clazz, MiscCodec.instance);
        }

        // Pick up any deserializers contributed via ServiceLoader on the
        // current thread's context class loader.
        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        try {
            for (AutowiredObjectDeserializer autowired : ServiceLoader.load(AutowiredObjectDeserializer.class,
                                                                            classLoader)) {
                for (Type forType : autowired.getAutowiredFor()) {
                    derializers.put(forType, autowired);
                }
            }
        } catch (Exception ex) {
            // skip — service loading is best-effort
        }

        if (derializer == null) {
            derializer = derializers.get(type);
        }

        if (derializer != null) {
            return derializer;
        }

        // Structural fallbacks, most specific first.
        if (clazz.isEnum()) {
            derializer = new EnumDeserializer(clazz);
        } else if (clazz.isArray()) {
            derializer = ObjectArrayCodec.instance;
        } else if (clazz == Set.class || clazz == HashSet.class || clazz == Collection.class || clazz == List.class
                   || clazz == ArrayList.class) {
            derializer = CollectionCodec.instance;
        } else if (Collection.class.isAssignableFrom(clazz)) {
            derializer = CollectionCodec.instance;
        } else if (Map.class.isAssignableFrom(clazz)) {
            derializer = MapDeserializer.instance;
        } else if (Throwable.class.isAssignableFrom(clazz)) {
            derializer = new ThrowableDeserializer(this, clazz);
        } else {
            derializer = createJavaBeanDeserializer(clazz, type);
        }

        putDeserializer(type, derializer);

        return derializer;
    }

    /**
     * Creates a JavaBean deserializer, using ASM code generation when the bean
     * satisfies a long list of constraints (public non-generic class, public
     * field types, ASM-safe names, no getter-only/format-annotated fields, …);
     * otherwise falls back to the reflection-based {@link JavaBeanDeserializer}.
     *
     * @throws ModelException if ASM generation fails unexpectedly
     */
    public ObjectDeserializer createJavaBeanDeserializer(Class<?> clazz, Type type) {
        boolean asmEnable = this.asmEnable;
        if (asmEnable) {
            JSONType jsonType = clazz.getAnnotation(JSONType.class);

            if (jsonType != null && !jsonType.asm()) {
                asmEnable = false;
            }

            if (asmEnable) {
                Class<?> superClass = JavaBeanInfo.getBuilderClass(jsonType);
                if (superClass == null) {
                    superClass = clazz;
                }

                // Every class in the hierarchy must be public for generated code
                // to access it.
                for (;;) {
                    if (!Modifier.isPublic(superClass.getModifiers())) {
                        asmEnable = false;
                        break;
                    }

                    superClass = superClass.getSuperclass();
                    if (superClass == Object.class || superClass == null) {
                        break;
                    }
                }
            }
        }

        if (clazz.getTypeParameters().length != 0) {
            asmEnable = false;
        }

        if (asmEnable && asmFactory != null && asmFactory.classLoader.isExternalClass(clazz)) {
            asmEnable = false;
        }

        if (asmEnable) {
            asmEnable = ASMUtils.checkName(clazz.getName());
        }

        if (asmEnable) {
            if (clazz.isInterface()) {
                asmEnable = false;
            }
            JavaBeanInfo beanInfo = JavaBeanInfo.build(clazz, type);

            if (asmEnable && beanInfo.fields.length > 200) {
                asmEnable = false;
            }

            Constructor<?> defaultConstructor = beanInfo.defaultConstructor;
            if (asmEnable && defaultConstructor == null && !clazz.isInterface()) {
                asmEnable = false;
            }

            for (FieldInfo fieldInfo : beanInfo.fields) {
                if (fieldInfo.getOnly) {
                    asmEnable = false;
                    break;
                }

                Class<?> fieldClass = fieldInfo.fieldClass;
                if (!Modifier.isPublic(fieldClass.getModifiers())) {
                    asmEnable = false;
                    break;
                }

                if (fieldClass.isMemberClass() && !Modifier.isStatic(fieldClass.getModifiers())) {
                    asmEnable = false;
                    break;
                }

                if (fieldInfo.getMember() != null //
                    && !ASMUtils.checkName(fieldInfo.getMember().getName())) {
                    asmEnable = false;
                    break;
                }

                JSONField annotation = fieldInfo.getAnnotation();
                if (annotation != null //
                    && ((!ASMUtils.checkName(annotation.name())) //
                        || annotation.format().length() != 0)) {
                    asmEnable = false;
                    break;
                }

                if (fieldClass.isEnum()) { // EnumDeserializer
                    ObjectDeserializer fieldDeser = this.getDeserializer(fieldClass);
                    if (!(fieldDeser instanceof EnumDeserializer)) {
                        asmEnable = false;
                        break;
                    }
                }
            }
        }

        if (asmEnable) {
            if (clazz.isMemberClass() && !Modifier.isStatic(clazz.getModifiers())) {
                asmEnable = false;
            }
        }

        if (!asmEnable) {
            return new JavaBeanDeserializer(this, clazz, type);
        }

        JavaBeanInfo beanInfo = JavaBeanInfo.build(clazz, type);
        try {
            return asmFactory.createJavaBeanDeserializer(this, beanInfo);
            // } catch (VerifyError e) {
            // e.printStackTrace();
            // return new JavaBeanDeserializer(this, clazz, type);
        } catch (NoSuchMethodException ex) {
            return new JavaBeanDeserializer(this, clazz, type);
        } catch (ModelException asmError) {
            return new JavaBeanDeserializer(this, beanInfo);
        } catch (Exception e) {
            throw new ModelException("create asm deserializer error, " + clazz.getName(), e);
        }
    }

    /**
     * Creates the field-level deserializer for a bean field; List/ArrayList
     * fields get the specialized array-list deserializer.
     */
    public FieldDeserializer createFieldDeserializer(ParserConfig mapping, //
                                                     JavaBeanInfo beanInfo, //
                                                     FieldInfo fieldInfo) {
        Class<?> clazz = beanInfo.clazz;
        Class<?> fieldClass = fieldInfo.fieldClass;

        if (fieldClass == List.class || fieldClass == ArrayList.class) {
            return new ArrayListTypeFieldDeserializer(mapping, clazz, fieldInfo);
        }

        return new DefaultFieldDeserializer(mapping, clazz, fieldInfo);
    }

    /** Registers (or replaces) the deserializer for a type. */
    public void putDeserializer(Type type, ObjectDeserializer deserializer) {
        derializers.put(type, deserializer);
    }

    public ObjectDeserializer getDeserializer(FieldInfo fieldInfo) {
        return getDeserializer(fieldInfo.fieldClass, fieldInfo.fieldType);
    }

    /**
     * Returns true for primitives, their wrappers, String, the common
     * date/time classes, BigInteger/BigDecimal and enums.
     */
    public static boolean isPrimitive(Class<?> clazz) {
        return clazz.isPrimitive() //
               || clazz == Boolean.class //
               || clazz == Character.class //
               || clazz == Byte.class //
               || clazz == Short.class //
               || clazz == Integer.class //
               || clazz == Long.class //
               || clazz == Float.class //
               || clazz == Double.class //
               || clazz == BigInteger.class //
               || clazz == BigDecimal.class //
               || clazz == String.class //
               || clazz == java.util.Date.class //
               || clazz == java.sql.Date.class //
               || clazz == java.sql.Time.class //
               || clazz == java.sql.Timestamp.class //
               || clazz.isEnum() //
        ;
    }

    /**
     * Looks up a declared field by name, also trying the "_" and "m_"
     * prefixed variants used by some naming conventions.
     */
    public static Field getField(Class<?> clazz, String fieldName) {
        Field field = getField0(clazz, fieldName);
        if (field == null) {
            field = getField0(clazz, "_" + fieldName);
        }

        if (field == null) {
            field = getField0(clazz, "m_" + fieldName);
        }

        return field;
    }

    // Exact-name lookup, walking up the superclass chain (stopping at Object).
    private static Field getField0(Class<?> clazz, String fieldName) {
        for (Field item : clazz.getDeclaredFields()) {
            if (fieldName.equals(item.getName())) {
                return item;
            }
        }
        if (clazz.getSuperclass() != null && clazz.getSuperclass() != Object.class) {
            return getField(clazz.getSuperclass(), fieldName);
        }

        return null;
    }

    public ClassLoader getDefaultClassLoader() {
        return defaultClassLoader;
    }

    public void setDefaultClassLoader(ClassLoader defaultClassLoader) {
        this.defaultClassLoader = defaultClassLoader;
    }

    /**
     * Appends a class-name prefix to the deny list (copy-on-write; empty and
     * null names are ignored).
     */
    public void addDeny(String name) {
        if (name == null || name.length() == 0) {
            return;
        }

        String[] denyList = new String[this.denyList.length + 1];
        System.arraycopy(this.denyList, 0, denyList, 0, this.denyList.length);
        denyList[denyList.length - 1] = name;
        this.denyList = denyList;
    }
}
/* * ============================================================================= * Simplified BSD License, see http://www.opensource.org/licenses/ * ----------------------------------------------------------------------------- * Copyright (c) 2008-2009, Marco Terzer, Zurich, Switzerland * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Swiss Federal Institute of Technology Zurich * nor the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
* =============================================================================
 */
package ch.javasoft.metabolic.util;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.Writer;
import java.text.Collator;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.logging.Level;
import java.util.logging.Logger;

import ch.javasoft.metabolic.FluxDistribution;
import ch.javasoft.metabolic.MetabolicNetwork;
import ch.javasoft.metabolic.Metabolite;
import ch.javasoft.metabolic.Reaction;
import ch.javasoft.util.genarr.ArrayIterable;
import ch.javasoft.util.logging.LogWriter;

/**
 * Utility class to output metabolic network info, e.g. to print the network
 * size or print all reactions, either to a file, output stream or logger.
 */
public class Output {

    private final PrintWriter mPrintWriter;
    private boolean mAutoFlush = true;

    /** Creates an output writing to {@link System#out}. */
    public Output() {
        this(System.out);
    }

    /** Creates an output writing to the given file. */
    public Output(File file) throws IOException {
        this(new FileWriter(file));
    }

    /** Creates an output writing to the given stream. */
    public Output(OutputStream out) {
        this(new PrintWriter(new OutputStreamWriter(out)));
    }

    /** Creates an output logging every line to the given logger at the given level. */
    public Output(Logger logger, Level level) {
        this(new LogWriter(logger, level));
    }

    /** Creates an output writing to the given writer, wrapping it if necessary. */
    public Output(Writer writer) {
        this(writer instanceof PrintWriter ? (PrintWriter) writer : new PrintWriter(writer));
    }

    public Output(PrintWriter printWriter) {
        mPrintWriter = printWriter;
    }

    /** Enables/disables flushing of the writer after each print method. */
    public final void setAutoFlush(boolean set) {
        mAutoFlush = set;
    }

    public final boolean isAutoFlush() {
        return mAutoFlush;
    }

    /** Gives direct access to the underlying print writer. */
    public final PrintWriter writer() {
        return mPrintWriter;
    }

    // Flushes the writer when auto-flush is enabled; called by every print method.
    private void maybeFlush() {
        if (mAutoFlush) {
            flush();
        }
    }

    /**
     * Prints reaction names and formulas, separated by colon and tab. If
     * desired, reactions can be sorted by name, and a newline can be appended
     * after each reaction.
     */
    public void printReactions(MetabolicNetwork net, boolean sort, boolean newLineAfterReaction) {
        printReactions(net.getReactions().toGenericArray(false), sort, newLineAfterReaction);
    }

    /**
     * Prints reaction names and formulas, separated by colon and tab. If
     * desired, reactions can be sorted by name, and a newline can be appended
     * after each reaction.
     */
    public void printReactions(Collection<? extends Reaction> reactions, boolean sort, boolean newLineAfterReaction) {
        Iterable<? extends Reaction> ordered;
        if (sort) {
            // sort a copy — the caller's collection stays untouched
            List<Reaction> sorted = new ArrayList<Reaction>(reactions);
            Collections.sort(sorted, new Comparator<Reaction>() {
                private final Collator mCollator = Collator.getInstance(Locale.US);
                public int compare(Reaction o1, Reaction o2) {
                    return mCollator.compare(o1.getName(), o2.getName());
                }
            });
            ordered = sorted;
        }
        else {
            ordered = reactions;
        }
        for (Reaction reaction : ordered) {
            printReaction(reaction, newLineAfterReaction);
        }
        maybeFlush();
    }

    /**
     * Prints reaction name and formula, separated by colon and tab. If
     * desired, the print statement is terminated with a newline string.
     */
    public void printReaction(Reaction reac, boolean newLineAfterReaction) {
        mPrintWriter.print(reac.getName());
        mPrintWriter.print(":\t");
        mPrintWriter.print(reac);
        if (newLineAfterReaction) {
            mPrintWriter.println();
        }
        else {
            mPrintWriter.print(", ");
        }
    }

    /**
     * Prints metabolite names for the network. If desired, metabolites are
     * previously sorted alphabetically, and a newline can be appended after
     * each metabolite.
     */
    public void printMetabolites(MetabolicNetwork net, boolean sort, boolean newLineAfterMetabolite) {
        printMetabolites(net.getMetabolites().toGenericArray(false), sort, newLineAfterMetabolite);
    }

    /**
     * Prints metabolite names for the network. If desired, metabolites are
     * previously sorted alphabetically, and a newline can be appended after
     * each metabolite.
     */
    public void printMetabolites(Collection<? extends Metabolite> metabolites, boolean sort, boolean newLineAfterMetabolite) {
        Iterable<? extends Metabolite> ordered;
        if (sort) {
            // sort a copy — the caller's collection stays untouched
            List<Metabolite> sorted = new ArrayList<Metabolite>(metabolites);
            Collections.sort(sorted, new Comparator<Metabolite>() {
                private final Collator mCollator = Collator.getInstance(Locale.US);
                public int compare(Metabolite r1, Metabolite r2) {
                    return mCollator.compare(r1.getName(), r2.getName());
                }
            });
            ordered = sorted;
        }
        else {
            ordered = metabolites;
        }
        for (Metabolite metabolite : ordered) {
            mPrintWriter.print(metabolite);
            if (newLineAfterMetabolite) {
                mPrintWriter.println();
            }
            else {
                mPrintWriter.print(", ");
            }
        }
        maybeFlush();
    }

    /**
     * Prints the network size (#metabolites, #reactions, #reversible reactions),
     * prepending the given prefix and appending newline.
     *
     * @see MetabolicNetworkUtil#getNetworkSizeString(String, MetabolicNetwork)
     */
    public void printNetworkSize(String prefix, MetabolicNetwork net) {
        mPrintWriter.println(MetabolicNetworkUtil.getNetworkSizeString(prefix, net));
        maybeFlush();
    }

    /**
     * print the flux modes in the format which is parsable by
     * {@link ch.javasoft.metabolic.parse.FluxAnalyserParser#parseEfms(MetabolicNetwork, Reader, int, boolean)}
     *
     * @param fluxModes The modes to write to the output
     */
    @SuppressWarnings("all")
    public void printFluxModes(Iterable<FluxDistribution> fluxModes) {
        printFluxModesInternal(fluxModes);
    }

    // Writes a tab-separated header of reaction names followed by one
    // tab-separated row of rates per flux distribution.
    private void printFluxModesInternal(Iterable<FluxDistribution> fluxModes) {
        Iterator<FluxDistribution> it = fluxModes.iterator();
        if (!it.hasNext()) {
            return; // nothing to print
        }
        MetabolicNetwork net = it.next().getNetwork();
        ArrayIterable<? extends Reaction> reacts = net.getReactions();
        int reactionCount = reacts.length();

        // header row: reaction names, tab separated
        for (int col = 0; col < reactionCount; col++) {
            if (col > 0) {
                mPrintWriter.print("\t");
            }
            mPrintWriter.print(reacts.get(col).getName());
        }
        mPrintWriter.println();

        // one row per mode, columns in the same order as the header
        for (FluxDistribution flux : fluxModes) {
            if (flux.getSize() != reactionCount) {
                throw new IllegalArgumentException(
                    "flux count (" + flux.getSize() + ") not equal to number of reactions (" + reactionCount + ")"
                );
            }
            for (int col = 0; col < flux.getSize(); col++) {
                if (col > 0) {
                    mPrintWriter.print("\t");
                }
                mPrintWriter.print(flux.getNumberRate(col));
            }
            mPrintWriter.println();
        }
        maybeFlush();
    }

    /** Flushes the underlying writer. */
    public void flush() {
        mPrintWriter.flush();
    }
}
/* * The MIT License (MIT) * * Copyright (c) 2014-2017 Sri Harsha Chilakapati * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
*/

package com.shc.silenceengine.scene.tiled;

import com.shc.easyxml.XmlTag;
import com.shc.silenceengine.core.SilenceException;
import com.shc.silenceengine.io.FilePath;
import com.shc.silenceengine.math.Vector2;
import com.shc.silenceengine.scene.tiled.tiles.TmxTerrain;
import com.shc.silenceengine.scene.tiled.tiles.TmxTile;

import java.util.ArrayList;
import java.util.List;

/**
 * Represents a single {@code <tileset>} element of a TMX (Tiled) map file.
 *
 * @author Sri Harsha Chilakapati
 */
public class TmxTileSet
{
    private int    firstGID;   // global ID of the first tile in this set
    private String name;
    private int    tileWidth;
    private int    tileHeight;
    private int    spacing;    // pixels between tiles in the source image
    private int    margin;     // pixels around the outside of the tile grid

    private Vector2  tileOffset;
    private TmxImage image;    // null when the tile set declares no <image> element

    private List<TmxTerrain> terrainTypes;
    private List<TmxTile>    tiles;
    private TmxProperties    properties;

    public TmxTileSet()
    {
        tileOffset = new Vector2();
        terrainTypes = new ArrayList<>();
        tiles = new ArrayList<>();
        properties = new TmxProperties();
    }

    /**
     * Populates this tile set from a {@code <tileset>} XML element.
     *
     * @param element the {@code <tileset>} tag to read
     * @param path    the map file location, used to resolve the tile set image
     */
    public void parse(XmlTag element, FilePath path)
    {
        firstGID = Integer.parseInt(element.getAttribute("firstgid").value);

        if (element.getAttribute("source") != null)
            throw new SilenceException("External tile sets aren't supported yet");

        tileWidth = Integer.parseInt(element.getAttribute("tilewidth").value);
        tileHeight = Integer.parseInt(element.getAttribute("tileheight").value);

        // margin and spacing are optional and default to 0
        margin = element.getAttribute("margin") != null ? Integer.parseInt(element.getAttribute("margin").value) : 0;
        spacing = element.getAttribute("spacing") != null ? Integer.parseInt(element.getAttribute("spacing").value) : 0;

        name = element.getAttribute("name").value;

        List<XmlTag> nodes = element.getTagsByName("tileoffset");
        if (nodes.size() > 0)
        {
            XmlTag childElement = nodes.get(0);

            tileOffset.x = Float.parseFloat(childElement.getAttribute("x").value);
            tileOffset.y = Float.parseFloat(childElement.getAttribute("y").value);
        }

        nodes = element.getTagsByName("terraintypes");
        for (XmlTag terrain : nodes)
        {
            // NOTE(review): this parses the <terraintypes> container tag itself,
            // not its <terrain> children — confirm TmxTerrain.parse expects the
            // container element.
            TmxTerrain terrainType = new TmxTerrain();
            terrainType.parse(terrain);
            terrainTypes.add(terrainType);
        }

        nodes = element.getTagsByName("image");
        if (nodes.size() > 0)
        {
            image = new TmxImage();
            image.parse(nodes.get(0), path);

            // Pre-create one TmxTile per cell of the image grid. BUG FIX: this
            // was previously done unconditionally and threw a
            // NullPointerException for image-less tile sets.
            int tileCount = (image.getWidth() / tileWidth) * (image.getHeight() / tileHeight);

            for (int tID = tiles.size(); tID < tileCount; tID++)
                tiles.add(new TmxTile(tID));
        }

        nodes = element.getTagsByName("tile");
        for (int i = 0; i < nodes.size(); i++)
        {
            XmlTag tileNode = nodes.get(i);

            // Parse into a probe tile just to discover the tile's local ID.
            TmxTile probe = new TmxTile();
            probe.parse(tileNode);

            // Grow the list when the ID lies outside the pre-created range —
            // this also covers tile sets that declared no <image> element.
            while (tiles.size() <= probe.getID())
                tiles.add(new TmxTile(tiles.size()));

            tiles.get(probe.getID()).parse(tileNode);
        }

        nodes = element.getTagsByName("properties");
        if (nodes.size() > 0)
            properties.parse(nodes.get(0));
    }

    /** Returns the global ID of the first tile of this tile set. */
    public int getFirstGID()
    {
        return firstGID;
    }

    public String getName()
    {
        return name;
    }

    public int getTileWidth()
    {
        return tileWidth;
    }

    public int getTileHeight()
    {
        return tileHeight;
    }

    public int getSpacing()
    {
        return spacing;
    }

    public int getMargin()
    {
        return margin;
    }

    public Vector2 getTileOffset()
    {
        return tileOffset;
    }

    /** Returns the source image of this tile set, or {@code null} if it has none. */
    public TmxImage getImage()
    {
        return image;
    }

    public List<TmxTerrain> getTerrainTypes()
    {
        return terrainTypes;
    }

    /**
     * Finds a tile by its local ID.
     *
     * @param id the local tile ID within this tile set
     * @return the matching tile, or {@code null} if no tile has this ID
     */
    public TmxTile getTile(long id)
    {
        for (TmxTile tile : tiles)
        {
            if (tile.getID() == id)
                return tile;
        }

        return null;
    }

    public List<TmxTile> getTiles()
    {
        return tiles;
    }

    public TmxProperties getProperties()
    {
        return properties;
    }
}
package com.owera.xaps.dbi;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import com.owera.common.db.ConnectionProperties;
import com.owera.common.db.ConnectionProvider;
import com.owera.common.db.NoAvailableConnectionException;
import com.owera.common.log.Logger;
import com.owera.xaps.dbi.util.SyslogClient;
import com.owera.xaps.dbi.util.XAPSVersionCheck;

/**
 * XAPSUnit is a class to help you work with units and unit parameters.
 *
 * All public methods borrow a connection from {@link ConnectionProvider} and
 * return it in a finally-block; write methods commit explicitly and roll back
 * on SQLException.
 */
public class XAPSUnit {

	private static Logger logger = new Logger();

	// Heuristic counters shared by all instances: addOrChangeUnitParameters uses
	// them to decide whether to try UPDATE or INSERT first for the next batch.
	private static long updateCounter;
	private static long insertCounter;

	private ConnectionProperties connectionProperties;
	private Syslog syslog;
	private XAPS xaps;

	/**
	 * @param connectionProperties database connection settings for the pool
	 * @param xaps the XAPS cache object; must be non-null, will be read if its unittypes are not yet loaded
	 * @param syslog syslog facade used to log unit changes
	 */
	public XAPSUnit(ConnectionProperties connectionProperties, XAPS xaps, Syslog syslog) throws NoAvailableConnectionException, SQLException {
		this.connectionProperties = connectionProperties;
		this.syslog = syslog;
		if (xaps == null)
			throw new IllegalArgumentException("The XAPSUnit constructor requires a non-null XAPS object");
		this.xaps = xaps;
		if (xaps.getUnittypes() == null) {
			xaps.read();
		}
	}

	/**
	 *
	 * @param value - expected to be a unique unit parameter value (ex: serialnumber, mac, ip, etc)
	 * @param unittype - may be null
	 * @param profile - may be null
	 * @return a unit object with all unit parameters found
	 * @throws SQLException
	 * @throws NoAvailableConnectionException
	 */
	public Unit getUnitByValue(String value, Unittype unittype, Profile profile) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, unittype, profile);
			Unit u = uqcu.getUnitByValue(value);
			// Session parameters are only merged in when the server supports them
			// and the unit is currently in session mode.
			if (u != null && XAPSVersionCheck.unitParamSessionSupported && u.isSessionMode())
				return uqcu.addSessionParameters(u);
			else
				return u;
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	/**
	 * Looks up a unit by a parameter value across all unittypes/profiles;
	 * the returned unit is only sparsely populated.
	 */
	public Unit getLimitedUnitByValue(String value) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, (Unittype) null, (Profile) null);
			return uqcu.getLimitedUnitByValue(value);
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	/**
	 *
	 * @param unitId
	 * @param unittype - may be null
	 * @param profile - may be null
	 * @return a unit object with all unit parameters found
	 * @throws SQLException
	 * @throws NoAvailableConnectionException
	 */
	public Unit getUnitById(String unitId, Unittype unittype, Profile profile) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, unittype, profile);
			Unit u = uqcu.getUnitById(unitId);
			if (u != null && XAPSVersionCheck.unitParamSessionSupported && u.isSessionMode())
				return uqcu.addSessionParameters(u);
			else
				return u;
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	/** Convenience overload: search across all unittypes and profiles. */
	public Unit getUnitById(String unitId) throws SQLException, NoAvailableConnectionException {
		return getUnitById(unitId, null, null);
	}

	/**
	 * This list will be INSERTed into the UNIT-table, and connected to the
	 * given profile (and thereby to the correct unittype).
	 *
	 * The function cannot change the profile or unittype for an already
	 * existing unitid. For changing the profile, use the moveUnit()-function,
	 * for changing the unittype you would have to delete all units from the
	 * current unittype, and add them to the new one.
The reason for this is * that you should make a new set of unittype-parameters as well when you do * that. * * @param unitIds * @throws SQLException * @throws NoAvailableConnectionException */ public void addUnits(List<String> unitIds, Profile profile) throws SQLException, NoAvailableConnectionException { Connection connection = null; PreparedStatement ps = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); Unittype unittype = profile.getUnittype(); for (int i = 0; unitIds != null && i < unitIds.size(); i++) { String unitId = unitIds.get(i); DynamicStatement ds = new DynamicStatement(); ds.addSql("INSERT INTO unit (unit_id, unit_type_id, profile_id) VALUES (?,?,?)"); ds.addArguments(unitId, unittype.getId(), profile.getId()); try { ps = ds.makePreparedStatement(connection); ps.setQueryTimeout(60); ps.executeUpdate(); SyslogClient.notice(unitId, "Added unit", syslog); logger.notice("Added unit " + unitId); } catch (SQLException ex) { ds = new DynamicStatement(); ds.addSql("UPDATE unit SET profile_id = ? WHERE unit_id = ? AND unit_type_id = ?"); ds.addArguments(profile.getId(), unitId, unittype.getId()); ps = ds.makePreparedStatement(connection); ps.setQueryTimeout(60); int rowsUpdated = ps.executeUpdate(); if (rowsUpdated == 0) throw ex; if (rowsUpdated > 0) { SyslogClient.notice(unitId, "Moved unit to profile " + profile.getName(), syslog); logger.notice("Moved unit " + unitId + " to profile " + profile.getName()); } } if (i > 0 && i % 100 == 0) connection.commit(); } connection.commit(); } catch (SQLException sqle) { connection.rollback(); throw sqle; } finally { if (ps != null) ps.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } /** * This method is made for moving a number of units from one profile to * another. If you try to move a unitId which does not exist, then the * method aborts with an sqlexception. 
 *
	 * @param unitIds
	 * @param profile
	 * @throws SQLException
	 * @throws NoAvailableConnectionException
	 */
	public void moveUnits(List<String> unitIds, Profile profile) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		PreparedStatement ps = null;
		SQLException sqlex = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			Integer unittypeId = profile.getUnittype().getId();
			Integer profileId = profile.getId();
			for (int i = 0; unitIds != null && i < unitIds.size(); i++) {
				DynamicStatement ds = new DynamicStatement();
				ds.addSqlAndArguments("UPDATE unit SET profile_id = ? WHERE unit_id = ? AND unit_type_id = ?", profileId, unitIds.get(i), unittypeId);
				ps = ds.makePreparedStatement(connection);
				ps.setQueryTimeout(60);
				ps.executeUpdate();
				logger.notice("Moved unit " + unitIds.get(i) + " to profile " + profile.getName());
			}
			// Single commit for the whole move — all units move or none do.
			connection.commit();
		} catch (SQLException sqle) {
			sqlex = sqle;
			connection.rollback();
			throw sqle;
		} finally {
			if (ps != null)
				ps.close();
			if (connection != null)
				ConnectionProvider.returnConnection(connection, sqlex);
		}
	}

	/**
	 * Executes an UPDATE/INSERT statement against a unit-parameter table.
	 * The statement is expected to take exactly three placeholders, in this
	 * order: value, unit id, unittype-parameter id.
	 *
	 * @return the number of rows affected
	 */
	private int executeSql(String sql, Connection c, UnittypeParameter unittypeParameter, String value, String unitId) throws SQLException {
		PreparedStatement pp = c.prepareStatement(sql);
		pp.setString(1, value);
		pp.setString(2, unitId);
		pp.setInt(3, unittypeParameter.getId());
		pp.setQueryTimeout(60);
		int rowsupdated = pp.executeUpdate();
		pp.close();
		return rowsupdated;
	}

	/** @return all unit ids that currently have rows in unit_param_session. */
	public List<String> getUnitIdsFromSessionUnitParameters() throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		PreparedStatement ps = null;
		ResultSet rs = null;
		SQLException sqlex = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			DynamicStatement ds = new DynamicStatement();
			ds.addSql("SELECT unit_id FROM unit_param_session");
			ps = ds.makePreparedStatement(connection);
			ps.setQueryTimeout(60);
			rs = ps.executeQuery();
			List<String> unitIds = new ArrayList<String>();
			while (rs.next()) {
				unitIds.add(rs.getString("unit_id"));
			}
			return unitIds;
		} catch (SQLException sqle) {
			sqlex = sqle;
			throw sqle;
		} finally {
			if (ps != null)
				ps.close();
			if (connection != null)
				ConnectionProvider.returnConnection(connection, sqlex);
		}
	}

	/**
	 * Writes unit parameters to either unit_param or unit_param_session.
	 * Uses the static update/insert counters to guess whether UPDATE or INSERT
	 * is more likely to succeed first, and falls back to the other on failure.
	 * Commits every 100 parameters and once at the end; rolls back on failure.
	 *
	 * @param session true writes to unit_param_session instead of unit_param
	 */
	private void addOrChangeUnitParameters(List<UnitParameter> unitParameters, Profile prof, boolean session) throws NoAvailableConnectionException, SQLException {
		Connection connection = null;
		PreparedStatement pp = null;
		String sql = null;
		boolean updateFirst = true;
		String tableName = "unit_param";
		if (session)
			tableName += "_session";
		try {
			if (updateCounter < insertCounter)
				updateFirst = false;
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			for (int i = 0; unitParameters != null && i < unitParameters.size(); i++) {
				UnitParameter unitParameter = unitParameters.get(i);
				String unitId = unitParameter.getUnitId();
				Parameter parameter = unitParameter.getParameter();
				// Values are truncated to the 512-char column; "..." marks the cut.
				if (parameter.getValue() != null && parameter.getValue().length() > 512)
					parameter.setValue(parameter.getValue().substring(0, 509) + "...");
				String value = parameter.getValue(); // will be "" if value was null
				String utpName = parameter.getUnittypeParameter().getName();
				String action = "Updated";
				if (updateFirst) {
					sql = "UPDATE " + tableName + " SET value = ? WHERE unit_id = ? AND unit_type_param_id = ?";
					int rowsupdated = executeSql(sql, connection, parameter.getUnittypeParameter(), value, unitId);
					if (rowsupdated == 0) {
						insertCounter++;
						action = "Added";
						sql = "INSERT INTO " + tableName + " (value, unit_id, unit_type_param_id) VALUES (?, ?, ?)";
						executeSql(sql, connection, parameter.getUnittypeParameter(), value, unitId);
					} else
						updateCounter++;
				} else {
					sql = "INSERT INTO " + tableName + " (value, unit_id, unit_type_param_id) VALUES (?, ?, ?)";
					try {
						executeSql(sql, connection, parameter.getUnittypeParameter(), value, unitId);
						insertCounter++;
						action = "Added";
					} catch (SQLException insertEx) {
						// Row already existed — retry as UPDATE.
						updateCounter++;
						sql = "UPDATE " + tableName + " SET value = ? WHERE unit_id = ? AND unit_type_param_id = ?";
						int rowsupdated = executeSql(sql, connection, parameter.getUnittypeParameter(), value, unitId);
						if (rowsupdated == 0)
							throw insertEx;
					}
				}
				// Decay the counters so the heuristic tracks recent traffic only.
				if (updateCounter > 25 || insertCounter > 25) {
					if (updateCounter > insertCounter) {
						updateCounter = 1;
						insertCounter = 0;
					} else {
						updateCounter = 0;
						insertCounter = 1;
					}
				}
				String msg = null;
				if (tableName.contains("session"))
					msg = action + " temporary unit parameter " + utpName;
				else
					msg = action + " unit parameter " + utpName;
				if (parameter.getUnittypeParameter().getFlag().isConfidential())
					msg += " with confidental value (*****)";
				else
					msg += " with value " + parameter.getValue();
				SyslogClient.notice(unitId, msg, syslog);
				logger.notice(msg);
				if (i > 0 && i % 100 == 0)
					connection.commit();
			}
			connection.commit();
		} catch (SQLException sqle) {
			connection.rollback();
			throw sqle;
		} finally {
			if (pp != null)
				pp.close();
			if (connection != null)
				ConnectionProvider.returnConnection(connection, null);
		}
	}

	/**
	 * Flushes a unit's queued parameter writes to the database, skipping
	 * queued parameters whose value is already stored.
	 */
	public void addOrChangeQueuedUnitParameters(Unit unit) throws SQLException, NoAvailableConnectionException {
		List<UnitParameter> queuedParameters = unit.flushWriteQueue();
		Iterator<UnitParameter> iterator = queuedParameters.iterator();
		while (iterator.hasNext()) {
			UnitParameter queuedUp = iterator.next();
			UnitParameter storedUp = unit.getUnitParameters().get(queuedUp.getParameter().getUnittypeParameter().getName());
			// NOTE(review): this compares a UnitParameter to a String value —
			// unless UnitParameter.equals() accepts Strings, this is always false
			// and the dedup never triggers. Likely intended:
			// storedUp.getValue().equals(queuedUp.getValue()). TODO confirm.
			if (storedUp != null && storedUp.getValue() != null && storedUp.equals(queuedUp.getValue())) {
				iterator.remove(); // don't write the queued Unit Parameter if it has the same value as already stored
			}
		}
		addOrChangeUnitParameters(queuedParameters, unit.getProfile());
	}

	/** Writes the given unit parameters to the persistent unit_param table. */
	public void addOrChangeUnitParameters(List<UnitParameter> unitParameters, Profile prof) throws SQLException, NoAvailableConnectionException {
		addOrChangeUnitParameters(unitParameters, prof, false);
	}

	/** Convenience method: writes a single named parameter for a unit. */
	public void addOrChangeUnitParameter(Unit unit, String unittypeParameterName, String value) throws SQLException, NoAvailableConnectionException {
		Unittype unittype = unit.getUnittype();
		Parameter parameter = new Parameter(unittype.getUnittypeParameters().getByName(unittypeParameterName), value);
		UnitParameter up = new UnitParameter(parameter, unit.getId(), unit.getProfile());
		List<UnitParameter> ups = new ArrayList<UnitParameter>();
		ups.add(up);
		addOrChangeUnitParameters(ups, unit.getProfile());
	}

	/** Writes session (temporary) parameters; a no-op on servers without session support. */
	public void addOrChangeSessionUnitParameters(List<UnitParameter> unitParameters, Profile prof) throws NoAvailableConnectionException, SQLException {
		if (XAPSVersionCheck.unitParamSessionSupported)
			addOrChangeUnitParameters(unitParameters, prof, true);
	}

	/**
	 * Deletes the unit and all the unitparameters in that unit.
* * @param unit * @throws SQLException * @throws NoAvailableConnectionException */ public int deleteUnit(Unit unit) throws SQLException, NoAvailableConnectionException { Connection connection = null; PreparedStatement ps = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); DynamicStatement ds = new DynamicStatement(); ds.addSqlAndArguments("DELETE FROM unit_param WHERE unit_id = ?", unit.getId()); ps = ds.makePreparedStatement(connection); ps.setQueryTimeout(60); int paramsDeleted = ps.executeUpdate(); ps.close(); ds = new DynamicStatement(); ds.addSqlAndArguments("DELETE FROM unit_job WHERE unit_id = ?", unit.getId()); ps = ds.makePreparedStatement(connection); ps.setQueryTimeout(60); int unitJobsDeleted = ps.executeUpdate(); ps.close(); ds = new DynamicStatement(); ds.addSqlAndArguments("DELETE FROM unit WHERE unit_id = ?", unit.getId()); ps = ds.makePreparedStatement(connection); ps.setQueryTimeout(60); int rowsDeleted = ps.executeUpdate(); ps.close(); connection.commit(); if (paramsDeleted > 0) logger.notice("Deleted " + paramsDeleted + " unit parameters for unit " + unit.getId()); if (unitJobsDeleted > 0) logger.notice("Deleted " + unitJobsDeleted + " unit jobs for unit " + unit.getId()); if (rowsDeleted == 0) logger.warn("No unit deleted, possibly because it did not exist."); else { SyslogClient.notice(unit.getId(), "Deleted unit", syslog); logger.notice("Deleted unit " + unit.getId()); } return rowsDeleted; } catch (SQLException sqle) { connection.rollback(); throw sqle; } finally { if (ps != null) ps.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } /** * Deletes all unitparameters and units for the units in the given profile. * WARNING: These SQL statements may be very slow to execute. Look at the * plan here: 1. SQL to get all Units in a profile 2. Iterate over all units * in profile and delete parameters for each one. 3. 
SQL to delete all units * in a profile If you have 100000 units in a profile, then you will need to * run 100002 SQL statements. That is going to take a long time. * * @param profile * @throws SQLException * @throws NoAvailableConnectionException */ public int deleteUnits(Profile profile) throws SQLException, NoAvailableConnectionException { Statement s = null; String sql = null; Map<String, Unit> unitMap = getUnits(profile.getUnittype(), profile, (Parameter) null, Integer.MAX_VALUE); Connection connection = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); s = connection.createStatement(); int counter = 0; int upDeleted = 0; for (String unitId : unitMap.keySet()) { sql = "DELETE FROM unit_param WHERE unit_id = '" + unitId + "'"; upDeleted += s.executeUpdate(sql); if (counter > 0 && counter % 100 == 0) connection.commit(); counter++; } logger.notice("Deleted unit parameters for all units in for profile " + profile.getName() + "(" + upDeleted + " parameters deleted)"); sql = "DELETE FROM unit WHERE profile_id = " + profile.getId(); int rowsDeleted = s.executeUpdate(sql); logger.notice("Deleted all units in for profile " + profile.getName() + "(" + rowsDeleted + " units deleted)"); connection.commit(); return rowsDeleted; } catch (SQLException sqle) { connection.rollback(); throw sqle; } finally { if (s != null) s.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } /** * See comment on deleteUnits(Profile) */ public int deleteUnits(Unittype unittype) throws SQLException, NoAvailableConnectionException { Profile[] profiles = unittype.getProfiles().getProfiles(); int rowsDeleted = 0; for (int i = 0; i < profiles.length; i++) { rowsDeleted += deleteUnits(profiles[i]); } return rowsDeleted; } public void deleteUnitParameters(Unit unit) throws SQLException, NoAvailableConnectionException { deleteUnitParameters(unit.flushDeleteQueue()); } /** * Deletes all unitparameters in the list. 
If list set to null then all * parameters are deleted. * * @throws SQLException * @throws NoAvailableConnectionException */ public int deleteUnitParameters(List<UnitParameter> unitParameters) throws SQLException, NoAvailableConnectionException { Connection connection = null; Statement s = null; String sql = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); s = connection.createStatement(); int rowsDeleted = 0; for (int i = 0; i < unitParameters.size(); i++) { UnitParameter unitParameter = unitParameters.get(i); Integer utpId = unitParameter.getParameter().getUnittypeParameter().getId(); String unitId = unitParameter.getUnitId(); sql = "DELETE FROM unit_param WHERE unit_id = '" + unitId + "' AND unit_type_param_id = " + utpId; s.setQueryTimeout(60); rowsDeleted += s.executeUpdate(sql); if (rowsDeleted > 0) { SyslogClient.notice(unitId, "Deleted unit parameter " + unitParameter.getParameter().getUnittypeParameter(), syslog); logger.notice("Deleted unit parameter " + unitParameter.getParameter().getUnittypeParameter()); } } connection.commit(); return rowsDeleted; } catch (SQLException sqle) { connection.rollback(); throw sqle; } finally { if (s != null) s.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } public int deleteAllSessionParameters(Unit unit) throws NoAvailableConnectionException, SQLException { if (!XAPSVersionCheck.unitParamSessionSupported) return 0; Connection connection = null; Statement s = null; String sql = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); s = connection.createStatement(); int rowsDeleted = 0; sql = "DELETE FROM unit_param_session WHERE unit_id = '" + unit.getId() + "'"; s.setQueryTimeout(60); rowsDeleted += s.executeUpdate(sql); if (rowsDeleted > 0) logger.notice("Deleted " + rowsDeleted + " unit session parameters"); connection.commit(); return rowsDeleted; } catch (SQLException sqle) { throw sqle; } finally { if 
(s != null) s.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } /** * Deletes all units and the belonging unit-parameters in the list. This * method commits during the execution, so if something fails during the * execution, something might already have been committed. This is done for * performance reasons. * * @throws SQLException * @throws NoAvailableConnectionException */ public int deleteUnits(List<String> unitIds, Profile profile) throws SQLException, NoAvailableConnectionException { Connection connection = null; Statement s = null; String sql = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); s = connection.createStatement(); int rowsDeleted = 0; for (int i = 0; i < unitIds.size(); i++) { sql = "DELETE FROM unit_param WHERE unit_id = '" + unitIds.get(i) + "'"; s.setQueryTimeout(60); int upDeleted = s.executeUpdate(sql); logger.notice("Deleted all unit parameters for unit " + unitIds.get(i) + "(" + upDeleted + " parameters deleted)"); sql = "DELETE FROM unit WHERE unit_id = '" + unitIds.get(i) + "'"; s.setQueryTimeout(60); rowsDeleted += s.executeUpdate(sql); SyslogClient.notice(unitIds.get(i), "Deleted unit", syslog); logger.notice("Deleted unit " + unitIds.get(i)); if (i > 0 && i % 100 == 0) connection.commit(); } connection.commit(); return rowsDeleted; } catch (SQLException sqle) { // We will rollback that which is not yet commited. connection.rollback(); throw sqle; } finally { if (s != null) s.close(); if (connection != null) ConnectionProvider.returnConnection(connection, null); } } /** * This method will return a list of units where only unit-id is populated. 
To retrieve all parameters of * the unit, run getUnitsWithParameters(Unittype,Profile,List<Unit>) * @param values * @return * @throws NoAvailableConnectionException * @throws SQLException */ public List<Unit> getLimitedUnitsByValues(List<String> values) throws NoAvailableConnectionException, SQLException { Connection connection = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, (Unittype) null, (Profile) null); return uqcu.getLimitedUnitsByValue(values); } finally { if (connection != null) { ConnectionProvider.returnConnection(connection, null); } } } /** * This method will return a map of fully populated Unit objects. Do not ask for a large number of units (>100), since then it may take a * long time to complete. Also the result-set may be memory-intensive if very large. * * @param unitIds - this list must be retrieved by running one of the getUnits() methods * @param unittype - may be null * @param profile - may be null * @return map of fully populated Unit objects */ public List<Unit> getUnitsWithParameters(Unittype unittype, Profile profile, List<Unit> units) throws SQLException { Connection connection = null; try { connection = ConnectionProvider.getConnection(connectionProperties, false); UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, unittype, profile); return uqcu.getUnitsById(units); } finally { if (connection != null) { ConnectionProvider.returnConnection(connection, null); } } } /** * * @param searchStr - may be null, if not null it will search for matches against unit-ids or unit-parameter values * @param unittype - may be null * @param profile - may be null * @param maxRows - may be null * @return a set of units. 
 The unit object is not populated with unit parameters
	 * @throws SQLException
	 * @throws NoAvailableConnectionException
	 */
	public Map<String, Unit> getUnits(String searchStr, Unittype unittype, Profile profile, Integer maxRows) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, unittype, profile);
			return uqcu.getUnits(searchStr, maxRows);
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	/** Same as above, but searching within a list of profiles across unittypes. */
	public Map<String, Unit> getUnits(String searchStr, List<Profile> profiles, Integer maxRows) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryCrossUnittype uqcu = new UnitQueryCrossUnittype(connection, xaps, (Unittype) null, profiles);
			return uqcu.getUnits(searchStr, maxRows);
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	/**
	 *
	 * @param unittype - Must specify
	 * @param profiles - If omitted, they will be set to the list of allowed profiles for this unittype
	 * @param parameters - If preset, they must be from the unittype
	 *
	 * @return A set of units with unit-parameters populated for those parameters asked for
	 */
	public Map<String, Unit> getUnits(Unittype unittype, List<Profile> profiles, List<Parameter> parameters, Integer limit) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryWithinUnittype uqwu = new UnitQueryWithinUnittype(connection, xaps, unittype, profiles);
			return uqwu.getUnits(parameters, limit);
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	// Convenience overload: single profile.
	public Map<String, Unit> getUnits(Unittype unittype, Profile profile, List<Parameter> parameters, Integer limit) throws SQLException, NoAvailableConnectionException {
		List<Profile> profiles = new ArrayList<Profile>();
		if (profile != null)
			profiles.add(profile);
		return getUnits(unittype, profiles, parameters, limit);
	}

	// Convenience overload: single profile, single parameter.
	public Map<String, Unit> getUnits(Unittype unittype, Profile profile, Parameter parameter, Integer limit) throws SQLException, NoAvailableConnectionException {
		List<Profile> profiles = new ArrayList<Profile>();
		if (profile != null)
			profiles.add(profile);
		List<Parameter> parameters = new ArrayList<Parameter>();
		if (parameter != null)
			parameters.add(parameter);
		return getUnits(unittype, profiles, parameters, limit);
	}

	// Convenience overload: single parameter.
	public Map<String, Unit> getUnits(Unittype unittype, List<Profile> profiles, Parameter parameter, Integer limit) throws SQLException, NoAvailableConnectionException {
		List<Parameter> parameters = new ArrayList<Parameter>();
		if (parameter != null)
			parameters.add(parameter);
		return getUnits(unittype, profiles, parameters, limit);
	}

	/** Returns all units matched by the given group's parameter filters. */
	public Map<String, Unit> getUnits(Group group) throws SQLException, NoAvailableConnectionException {
		Group topParent = group.getTopParent();
		Profile profile = topParent.getProfile();
		Unittype unittype = group.getUnittype();
		return getUnits(unittype, profile, group.getGroupParameters().getAllParameters(group), Integer.MAX_VALUE);
	}

	/** Counts the units matched by the given group's parameter filters. */
	public int getUnitCount(Group group) throws SQLException, NoAvailableConnectionException {
		Group topParent = group.getTopParent();
		Profile profile = topParent.getProfile();
		Unittype unittype = group.getUnittype();
		return getUnitCount(unittype, profile, group.getGroupParameters().getAllParameters(group));
	}

	/**
	 *
	 * @param unittype - Must specify
	 * @param profiles - If omitted, they will be set to the list of allowed profiles for this unittype
	 * @param parameters - If preset, they must be from the unittype
	 *
	 * @return A set of units with unit-parameters populated for those parameters asked for
	 */
	public int getUnitCount(Unittype unittype, List<Profile> profiles, List<Parameter> parameters) throws SQLException, NoAvailableConnectionException {
		Connection connection = null;
		try {
			connection = ConnectionProvider.getConnection(connectionProperties, false);
			UnitQueryWithinUnittype uqwu = new UnitQueryWithinUnittype(connection, xaps, unittype, profiles);
			return uqwu.getUnitCount(parameters);
		} finally {
			if (connection != null) {
				ConnectionProvider.returnConnection(connection, null);
			}
		}
	}

	// Convenience overload: single parameter.
	public int getUnitCount(Unittype unittype, List<Profile> profiles, Parameter parameter) throws SQLException, NoAvailableConnectionException {
		List<Parameter> parameters = new ArrayList<Parameter>();
		if (parameter != null)
			parameters.add(parameter);
		return getUnitCount(unittype, profiles, parameters);
	}

	// Convenience overload: single profile.
	public int getUnitCount(Unittype unittype, Profile profile, List<Parameter> parameters) throws SQLException, NoAvailableConnectionException {
		List<Profile> profiles = new ArrayList<Profile>();
		if (profile != null)
			profiles.add(profile);
		return getUnitCount(unittype, profiles, parameters);
	}

	// Convenience overload: single profile, single parameter.
	public int getUnitCount(Unittype unittype, Profile profile, Parameter parameter) throws SQLException, NoAvailableConnectionException {
		List<Profile> profiles = new ArrayList<Profile>();
		if (profile != null)
			profiles.add(profile);
		List<Parameter> parameters = new ArrayList<Parameter>();
		if (parameter != null)
			parameters.add(parameter);
		return getUnitCount(unittype, profiles, parameters);
	}
}
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.settings.location;

import android.content.Context;
import android.content.Intent;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.pm.ServiceInfo;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.content.res.XmlResourceParser;
import android.graphics.drawable.Drawable;
import android.location.SettingInjectorService;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.os.Messenger;
import android.os.SystemClock;
import android.os.UserHandle;
import android.os.UserManager;
import android.preference.Preference;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Xml;

import com.android.settings.R;

import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

/**
 * Adds the preferences specified by the {@link InjectedSetting} objects to a preference group.
 *
 * Duplicates some code from {@link android.content.pm.RegisteredServicesCache}. We do not use that
 * class directly because it is not a good match for our use case: we do not need the caching, and
 * so do not want the additional resource hit at app install/upgrade time; and we would have to
 * suppress the tie-breaking between multiple services reporting settings with the same name.
 * Code-sharing would require extracting {@link
 * android.content.pm.RegisteredServicesCache#parseServiceAttributes(android.content.res.Resources,
 * String, android.util.AttributeSet)} into an interface, which didn't seem worth it.
 */
class SettingsInjector {
    static final String TAG = "SettingsInjector";

    /**
     * If reading the status of a setting takes longer than this, we go ahead and start reading
     * the next setting.
     */
    private static final long INJECTED_STATUS_UPDATE_TIMEOUT_MILLIS = 1000;

    /**
     * {@link Message#what} value for starting to load status values
     * in case we aren't already in the process of loading them.
     */
    private static final int WHAT_RELOAD = 1;

    /**
     * {@link Message#what} value sent after receiving a status message.
     */
    private static final int WHAT_RECEIVED_STATUS = 2;

    /**
     * {@link Message#what} value sent after the timeout waiting for a status message.
     */
    private static final int WHAT_TIMEOUT = 3;

    private final Context mContext;

    /**
     * The settings that were injected
     */
    private final Set<Setting> mSettings;

    private final Handler mHandler;

    public SettingsInjector(Context context) {
        mContext = context;
        mSettings = new HashSet<Setting>();
        // StatusLoadingHandler drives the WHAT_* status-loading state machine.
        mHandler = new StatusLoadingHandler();
    }

    /**
     * Returns a list for a profile with one {@link InjectedSetting} object for each
     * {@link android.app.Service} that responds to
     * {@link SettingInjectorService#ACTION_SERVICE_INTENT} and provides the expected setting
     * metadata.
     *
     * Duplicates some code from {@link android.content.pm.RegisteredServicesCache}.
* * TODO: unit test */ private List<InjectedSetting> getSettings(final UserHandle userHandle) { PackageManager pm = mContext.getPackageManager(); Intent intent = new Intent(SettingInjectorService.ACTION_SERVICE_INTENT); final int profileId = userHandle.getIdentifier(); List<ResolveInfo> resolveInfos = pm.queryIntentServicesAsUser(intent, PackageManager.GET_META_DATA, profileId); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "Found services for profile id " + profileId + ": " + resolveInfos); } List<InjectedSetting> settings = new ArrayList<InjectedSetting>(resolveInfos.size()); for (ResolveInfo resolveInfo : resolveInfos) { try { InjectedSetting setting = parseServiceInfo(resolveInfo, userHandle, pm); if (setting == null) { Log.w(TAG, "Unable to load service info " + resolveInfo); } else { settings.add(setting); } } catch (XmlPullParserException e) { Log.w(TAG, "Unable to load service info " + resolveInfo, e); } catch (IOException e) { Log.w(TAG, "Unable to load service info " + resolveInfo, e); } } if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "Loaded settings for profile id " + profileId + ": " + settings); } return settings; } /** * Returns the settings parsed from the attributes of the * {@link SettingInjectorService#META_DATA_NAME} tag, or null. * * Duplicates some code from {@link android.content.pm.RegisteredServicesCache}. 
*/ private static InjectedSetting parseServiceInfo(ResolveInfo service, UserHandle userHandle, PackageManager pm) throws XmlPullParserException, IOException { ServiceInfo si = service.serviceInfo; ApplicationInfo ai = si.applicationInfo; if ((ai.flags & ApplicationInfo.FLAG_SYSTEM) == 0) { if (Log.isLoggable(TAG, Log.WARN)) { Log.w(TAG, "Ignoring attempt to inject setting from app not in system image: " + service); return null; } } XmlResourceParser parser = null; try { parser = si.loadXmlMetaData(pm, SettingInjectorService.META_DATA_NAME); if (parser == null) { throw new XmlPullParserException("No " + SettingInjectorService.META_DATA_NAME + " meta-data for " + service + ": " + si); } AttributeSet attrs = Xml.asAttributeSet(parser); int type; while ((type = parser.next()) != XmlPullParser.END_DOCUMENT && type != XmlPullParser.START_TAG) { } String nodeName = parser.getName(); if (!SettingInjectorService.ATTRIBUTES_NAME.equals(nodeName)) { throw new XmlPullParserException("Meta-data does not start with " + SettingInjectorService.ATTRIBUTES_NAME + " tag"); } Resources res = pm.getResourcesForApplicationAsUser(si.packageName, userHandle.getIdentifier()); return parseAttributes(si.packageName, si.name, userHandle, res, attrs); } catch (PackageManager.NameNotFoundException e) { throw new XmlPullParserException( "Unable to load resources for package " + si.packageName); } finally { if (parser != null) { parser.close(); } } } /** * Returns an immutable representation of the static attributes for the setting, or null. 
*/ private static InjectedSetting parseAttributes(String packageName, String className, UserHandle userHandle, Resources res, AttributeSet attrs) { TypedArray sa = res.obtainAttributes(attrs, android.R.styleable.SettingInjectorService); try { // Note that to help guard against malicious string injection, we do not allow dynamic // specification of the label (setting title) final String title = sa.getString(android.R.styleable.SettingInjectorService_title); final int iconId = sa.getResourceId(android.R.styleable.SettingInjectorService_icon, 0); final String settingsActivity = sa.getString(android.R.styleable.SettingInjectorService_settingsActivity); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "parsed title: " + title + ", iconId: " + iconId + ", settingsActivity: " + settingsActivity); } return InjectedSetting.newInstance(packageName, className, title, iconId, userHandle, settingsActivity); } finally { sa.recycle(); } } /** * Gets a list of preferences that other apps have injected. * * @param profileId Identifier of the user/profile to obtain the injected settings for or * UserHandle.USER_CURRENT for all profiles associated with current user. */ public List<Preference> getInjectedSettings(final int profileId) { final UserManager um = (UserManager) mContext.getSystemService(Context.USER_SERVICE); final List<UserHandle> profiles = um.getUserProfiles(); ArrayList<Preference> prefs = new ArrayList<Preference>(); final int profileCount = profiles.size(); for (int i = 0; i < profileCount; ++i) { final UserHandle userHandle = profiles.get(i); if (profileId == UserHandle.USER_CURRENT || profileId == userHandle.getIdentifier()) { Iterable<InjectedSetting> settings = getSettings(userHandle); for (InjectedSetting setting : settings) { Preference pref = addServiceSetting(prefs, setting); mSettings.add(new Setting(setting, pref)); } } } reloadStatusMessages(); return prefs; } /** * Reloads the status messages for all the preference items. 
*/ public void reloadStatusMessages() { if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "reloadingStatusMessages: " + mSettings); } mHandler.sendMessage(mHandler.obtainMessage(WHAT_RELOAD)); } /** * Adds an injected setting to the root. */ private Preference addServiceSetting(List<Preference> prefs, InjectedSetting info) { PackageManager pm = mContext.getPackageManager(); Drawable appIcon = pm.getDrawable(info.packageName, info.iconId, null); Drawable icon = pm.getUserBadgedIcon(appIcon, info.mUserHandle); CharSequence badgedAppLabel = pm.getUserBadgedLabel(info.title, info.mUserHandle); if (info.title.contentEquals(badgedAppLabel)) { // If badged label is not different from original then no need for it as // a separate content description. badgedAppLabel = null; } Preference pref = new DimmableIconPreference(mContext, badgedAppLabel); pref.setTitle(info.title); pref.setSummary(null); pref.setIcon(icon); pref.setOnPreferenceClickListener(new ServiceSettingClickedListener(info)); prefs.add(pref); return pref; } private class ServiceSettingClickedListener implements Preference.OnPreferenceClickListener { private InjectedSetting mInfo; public ServiceSettingClickedListener(InjectedSetting info) { mInfo = info; } @Override public boolean onPreferenceClick(Preference preference) { // Activity to start if they click on the preference. Must start in new task to ensure // that "android.settings.LOCATION_SOURCE_SETTINGS" brings user back to // Settings > Location. Intent settingIntent = new Intent(); settingIntent.setClassName(mInfo.packageName, mInfo.settingsActivity); settingIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); mContext.startActivityAsUser(settingIntent, mInfo.mUserHandle); return true; } } /** * Loads the setting status values one at a time. Each load starts a subclass of {@link * SettingInjectorService}, so to reduce memory pressure we don't want to load too many at * once. 
*/ private final class StatusLoadingHandler extends Handler { /** * Settings whose status values need to be loaded. A set is used to prevent redundant loads. */ private Set<Setting> mSettingsToLoad = new HashSet<Setting>(); /** * Settings that are being loaded now and haven't timed out. In practice this should have * zero or one elements. */ private Set<Setting> mSettingsBeingLoaded = new HashSet<Setting>(); /** * Settings that are being loaded but have timed out. If only one setting has timed out, we * will go ahead and start loading the next setting so that one slow load won't delay the * load of the other settings. */ private Set<Setting> mTimedOutSettings = new HashSet<Setting>(); private boolean mReloadRequested; @Override public void handleMessage(Message msg) { if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "handleMessage start: " + msg + ", " + this); } // Update state in response to message switch (msg.what) { case WHAT_RELOAD: mReloadRequested = true; break; case WHAT_RECEIVED_STATUS: final Setting receivedSetting = (Setting) msg.obj; receivedSetting.maybeLogElapsedTime(); mSettingsBeingLoaded.remove(receivedSetting); mTimedOutSettings.remove(receivedSetting); removeMessages(WHAT_TIMEOUT, receivedSetting); break; case WHAT_TIMEOUT: final Setting timedOutSetting = (Setting) msg.obj; mSettingsBeingLoaded.remove(timedOutSetting); mTimedOutSettings.add(timedOutSetting); if (Log.isLoggable(TAG, Log.WARN)) { Log.w(TAG, "Timed out after " + timedOutSetting.getElapsedTime() + " millis trying to get status for: " + timedOutSetting); } break; default: Log.wtf(TAG, "Unexpected what: " + msg); } // Decide whether to load additional settings based on the new state. Start by seeing // if we have headroom to load another setting. if (mSettingsBeingLoaded.size() > 0 || mTimedOutSettings.size() > 1) { // Don't load any more settings until one of the pending settings has completed. 
// To reduce memory pressure, we want to be loading at most one setting (plus at // most one timed-out setting) at a time. This means we'll be responsible for // bringing in at most two services. if (Log.isLoggable(TAG, Log.VERBOSE)) { Log.v(TAG, "too many services already live for " + msg + ", " + this); } return; } if (mReloadRequested && mSettingsToLoad.isEmpty() && mSettingsBeingLoaded.isEmpty() && mTimedOutSettings.isEmpty()) { if (Log.isLoggable(TAG, Log.VERBOSE)) { Log.v(TAG, "reloading because idle and reload requesteed " + msg + ", " + this); } // Reload requested, so must reload all settings mSettingsToLoad.addAll(mSettings); mReloadRequested = false; } // Remove the next setting to load from the queue, if any Iterator<Setting> iter = mSettingsToLoad.iterator(); if (!iter.hasNext()) { if (Log.isLoggable(TAG, Log.VERBOSE)) { Log.v(TAG, "nothing left to do for " + msg + ", " + this); } return; } Setting setting = iter.next(); iter.remove(); // Request the status value setting.startService(); mSettingsBeingLoaded.add(setting); // Ensure that if receiving the status value takes too long, we start loading the // next value anyway Message timeoutMsg = obtainMessage(WHAT_TIMEOUT, setting); sendMessageDelayed(timeoutMsg, INJECTED_STATUS_UPDATE_TIMEOUT_MILLIS); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, "handleMessage end " + msg + ", " + this + ", started loading " + setting); } } @Override public String toString() { return "StatusLoadingHandler{" + "mSettingsToLoad=" + mSettingsToLoad + ", mSettingsBeingLoaded=" + mSettingsBeingLoaded + ", mTimedOutSettings=" + mTimedOutSettings + ", mReloadRequested=" + mReloadRequested + '}'; } } /** * Represents an injected setting and the corresponding preference. 
*/ private final class Setting { public final InjectedSetting setting; public final Preference preference; public long startMillis; private Setting(InjectedSetting setting, Preference preference) { this.setting = setting; this.preference = preference; } @Override public String toString() { return "Setting{" + "setting=" + setting + ", preference=" + preference + '}'; } /** * Returns true if they both have the same {@link #setting} value. Ignores mutable * {@link #preference} and {@link #startMillis} so that it's safe to use in sets. */ @Override public boolean equals(Object o) { return this == o || o instanceof Setting && setting.equals(((Setting) o).setting); } @Override public int hashCode() { return setting.hashCode(); } /** * Starts the service to fetch for the current status for the setting, and updates the * preference when the service replies. */ public void startService() { Handler handler = new Handler() { @Override public void handleMessage(Message msg) { Bundle bundle = msg.getData(); boolean enabled = bundle.getBoolean(SettingInjectorService.ENABLED_KEY, true); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, setting + ": received " + msg + ", bundle: " + bundle); } preference.setSummary(null); preference.setEnabled(enabled); mHandler.sendMessage( mHandler.obtainMessage(WHAT_RECEIVED_STATUS, Setting.this)); } }; Messenger messenger = new Messenger(handler); Intent intent = setting.getServiceIntent(); intent.putExtra(SettingInjectorService.MESSENGER_KEY, messenger); if (Log.isLoggable(TAG, Log.DEBUG)) { Log.d(TAG, setting + ": sending update intent: " + intent + ", handler: " + handler); startMillis = SystemClock.elapsedRealtime(); } else { startMillis = 0; } // Start the service, making sure that this is attributed to the user associated with // the setting rather than the system user. 
mContext.startServiceAsUser(intent, setting.mUserHandle); } public long getElapsedTime() { long end = SystemClock.elapsedRealtime(); return end - startMillis; } public void maybeLogElapsedTime() { if (Log.isLoggable(TAG, Log.DEBUG) && startMillis != 0) { long elapsed = getElapsedTime(); Log.d(TAG, this + " update took " + elapsed + " millis"); } } } }
/** * Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.codesourcery.jasm16.ide.ui.viewcontainers; import java.awt.Color; import java.awt.Dimension; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.swing.JDesktopPane; import javax.swing.JFrame; import javax.swing.JInternalFrame; import javax.swing.event.InternalFrameAdapter; import javax.swing.event.InternalFrameEvent; import javax.swing.event.InternalFrameListener; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import de.codesourcery.jasm16.ide.IApplicationConfig; import de.codesourcery.jasm16.ide.ui.MenuManager; import de.codesourcery.jasm16.ide.ui.MenuManager.MenuEntry; import de.codesourcery.jasm16.ide.ui.utils.SizeAndLocation; import de.codesourcery.jasm16.ide.ui.utils.UIUtils; import de.codesourcery.jasm16.ide.ui.views.IView; import de.codesourcery.jasm16.ide.ui.views.IViewStateListener; /** * A view container that inherits from {@link JFrame} and uses {@link JInternalFrame}s to display * it's children. 
 *
 * @author tobias.gierke@code-sourcery.de
 */
public class Perspective extends JFrame implements IViewContainer {

    private static final Logger LOG = Logger.getLogger(Perspective.class);

    // Desktop pane hosting all child JInternalFrames.
    private final JDesktopPane desktop = new JDesktopPane();

    // One entry per open view; order is insertion order.
    private final List<InternalFrameWithView> views = new ArrayList<InternalFrameWithView>();

    // Unique ID of this perspective, used as the key prefix for persisted window coordinates.
    private final String id;

    private final ViewContainerHelper helper = new ViewContainerHelper();

    private final ViewContainerManager viewContainerManager;

    // Persists/restores window sizes and locations across sessions.
    private final IApplicationConfig applicationConfig;

    private final MenuManager menuManager = new MenuManager() {

        @Override
        public void menuBarChanged() {
            // Re-install the menu bar whenever the menu structure changes.
            setJMenuBar( menuManager.getMenuBar() );
        }
    };

    /**
     * Pairs a {@link JInternalFrame} with the {@link IView} it displays and manages the
     * optional {@link IViewStateListener} wiring for visibility notifications.
     */
    protected final class InternalFrameWithView {
        public final JInternalFrame frame;
        public final IView view;
        private final InternalFrameListener frameListener;

        public InternalFrameWithView(JInternalFrame frame,final IView view) {
            this.view = view;
            this.frame = frame;

            if ( view instanceof IViewStateListener ) {
                // Forward frame (de)activation to the view as visible/hidden callbacks.
                frameListener = new InternalFrameAdapter() {
                    @Override
                    public void internalFrameActivated(InternalFrameEvent e) {
                        ((IViewStateListener) view).viewVisible();
                    }

                    @Override
                    public void internalFrameDeactivated(InternalFrameEvent e) {
                        ((IViewStateListener) view).viewHidden();
                    }
                };
                frame.addInternalFrameListener( frameListener );
            } else {
                frameListener = null;
            }
        }

        /**
         * Persists the frame's current size/location, disposes the frame and then the view.
         */
        public void dispose() {
            // Capture coordinates BEFORE disposing the frame so they can be restored next time.
            final SizeAndLocation sizeAndLoc = new SizeAndLocation( frame.getLocation() , frame.getSize() );
            applicationConfig.storeViewCoordinates( getUniqueID( view ) , sizeAndLoc );
            frame.dispose();
            if ( frameListener != null ) {
                frame.removeInternalFrameListener( frameListener );
            }
            LOG.debug("dispose(): Disposing "+view);
            view.dispose();
        }
    }

    // Key under which a view's coordinates are persisted: "<perspectiveId>.<viewId>".
    private final String getUniqueID(IView view) {
        return getID()+"."+view.getID();
    }

    @Override
    public void setBlockAllUserInput(boolean yesNo) {
        UIUtils.setBlockAllUserInput( this , yesNo );
    }

    /**
     * Disposes this perspective: persists window coordinates, disposes every child view,
     * closes the frame, notifies listeners and saves the application configuration.
     */
    @Override
    public final void dispose() {
        disposeHook();

        final SizeAndLocation sizeAndLoc = new SizeAndLocation( getLocation() , getSize() );
        applicationConfig.storeViewCoordinates( getID() , sizeAndLoc );

        // Iterate over a copy: disposeView() mutates this.views.
        final List<InternalFrameWithView> views = new ArrayList<InternalFrameWithView>(this.views);
        for ( InternalFrameWithView v : views) {
            disposeView( v.view );
        }

        super.dispose();
        viewContainerManager.disposeAllExcept( this );
        helper.fireViewContainerClosed( this );

        try {
            this.applicationConfig.saveConfiguration();
        } catch (IOException e) {
            LOG.error("dispose(): Failed to save view coordinates",e);
        }
    }

    // Subclass hook invoked at the start of dispose(); default is a no-op.
    protected void disposeHook() {
    }

    /**
     * Creates a new perspective window.
     *
     * @param id unique non-blank perspective ID (used for persisting coordinates)
     * @param viewContainerManager manager that tracks all open view containers, never null
     * @param appConfig application configuration used to persist/restore geometry, never null
     */
    public Perspective(String id , final ViewContainerManager viewContainerManager , IApplicationConfig appConfig)
    {
        super("jASM16 DCPU emulator V"+de.codesourcery.jasm16.compiler.Compiler.getVersionNumber() );
        if ( viewContainerManager == null ) {
            throw new IllegalArgumentException("viewContainerManager must not be null");
        }
        if (appConfig == null) {
            throw new IllegalArgumentException("appConfig must not be null");
        }
        if (StringUtils.isBlank(id)) {
            throw new IllegalArgumentException("ID must not be NULL/blank.");
        }
        this.viewContainerManager = viewContainerManager;
        this.id = id;
        this.applicationConfig = appConfig;
        setPreferredSize( new Dimension(400,200 ) );
        getContentPane().add( desktop );

        setDefaultCloseOperation( JFrame.DISPOSE_ON_CLOSE );

        setBackground( Color.BLACK );
        setForeground( Color.GREEN );
        desktop.setBackground( Color.BLACK );
        desktop.setForeground( Color.GREEN );

        menuManager.addEntry( new MenuEntry("File/Quit") {

            @Override
            public void onClick()
            {
                // Quit = dispose perspective, persist config, then terminate the JVM.
                dispose();
                try {
                    applicationConfig.saveConfiguration();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    System.exit(0);
                }
            }
        } );

        // Restore the last-known geometry if available, otherwise fall back to a default size.
        final SizeAndLocation sizeAndLoc = applicationConfig.getViewCoordinates( getID() );
        if ( sizeAndLoc != null ) {
            setLocation( sizeAndLoc.getLocation() );
            setSize( sizeAndLoc.getSize() );
            setPreferredSize( sizeAndLoc.getSize() );
        } else {
            setPreferredSize( new Dimension(600,800 ) );
            pack();
        }
        setJMenuBar( menuManager.getMenuBar() );
    }

    /**
     * Disposes the internal frame showing the given view and removes it from this container.
     */
    @Override
    public void disposeView(IView view)
    {
        for (Iterator<InternalFrameWithView> it = this.views.iterator(); it.hasNext();)
        {
            InternalFrameWithView frame = it.next();
            if ( frame.view == view )
            {
                frame.dispose();
                it.remove();
                return;
            }
        }
    }

    /**
     * Adds a view in a new internal frame, restoring persisted geometry when available.
     * Frame closing is routed through {@link #disposeView(IView)} so coordinates get saved.
     */
    @Override
    public IView addView(final IView view)
    {
        if (view == null) {
            throw new IllegalArgumentException("view must not be NULL");
        }
        final JInternalFrame internalFrame = new JInternalFrame( view.getTitle(),true, true, true, true);
        internalFrame.setBackground(Color.BLACK);
        internalFrame.setForeground( Color.GREEN );
        internalFrame.getContentPane().add( view.getPanel(this) );

        SizeAndLocation sizeAndLoc = applicationConfig.getViewCoordinates( getUniqueID( view ) );
        if ( sizeAndLoc != null )
        {
            internalFrame.setSize( sizeAndLoc.getSize() );
            internalFrame.setLocation( sizeAndLoc.getLocation() );
        } else {
            internalFrame.setSize(200, 150);
            internalFrame.setLocation( 0 , 0 );
            internalFrame.pack();
        }

        internalFrame.setVisible( true );

        final InternalFrameWithView frameAndView = new InternalFrameWithView( internalFrame , view );

        final InternalFrameListener listener = new InternalFrameAdapter() {

            @Override
            public void internalFrameClosing(InternalFrameEvent e) {
                disposeView( view );
            }
        };

        // DO_NOTHING_ON_CLOSE: closing is handled by the listener so state gets persisted.
        internalFrame.setDefaultCloseOperation( JInternalFrame.DO_NOTHING_ON_CLOSE );
        internalFrame.addInternalFrameListener( listener );

        views.add( frameAndView );
        desktop.add(internalFrame);
        return view;
    }

    /** Returns a snapshot list of all views currently hosted by this perspective. */
    @Override
    public List<IView> getViews()
    {
        final List<IView> result = new ArrayList<IView>();
        for (InternalFrameWithView frame : this.views) {
            result.add( frame.view );
        }
        return result;
    }

    /** Sets the internal-frame title of the given view (identity comparison). */
    @Override
    public void setTitle(IView view, String title)
    {
        for (InternalFrameWithView frame : this.views)
        {
            if ( frame.view == view )
            {
                frame.frame.setTitle( title );
                break;
            }
        }
    }

    /** Returns the hosted view with the given ID, or null if none matches. */
    @Override
    public IView getViewByID(String viewId)
    {
        if (StringUtils.isBlank(viewId)) {
            throw new IllegalArgumentException("viewId must not be blank/null");
        }

        for (InternalFrameWithView frame : this.views)
        {
            if ( frame.view.getID().equals( viewId ) )
            {
                return frame.view;
            }
        }
        return null;
    }

    /** Brings the internal frame of the view with a matching ID to the front. */
    @Override
    public final void toFront(IView view)
    {
        for (InternalFrameWithView frame : this.views)
        {
            if ( frame.view.getID().equals( view.getID() ) )
            {
                frame.frame.toFront();
                return;
            }
        }
    }

    @Override
    public MenuManager getMenuManager() {
        return menuManager;
    }

    @Override
    public final String getID() {
        return id;
    }

    @Override
    public void addViewContainerListener(IViewContainerListener listener) {
        helper.addViewContainerListener( listener );
    }

    @Override
    public void removeViewContainerListener(IViewContainerListener listener) {
        helper.removeViewContainerListener( listener );
    }
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package edu.duke.biology.baughlab.wormsize;

import ij.ImagePlus;
import ij.gui.*;
import java.util.*;
import sc.fiji.analyzeSkeleton.*;
import ij.process.ImageProcessor;
import Skeletonize3D_.*;
//import sc.fiji.skeletonize3D.*;
import java.util.ArrayList;
import sc.fiji.analyzeSkeleton.AnalyzeSkeleton_;
import sc.fiji.analyzeSkeleton.Edge;
import sc.fiji.analyzeSkeleton.Graph;
import sc.fiji.analyzeSkeleton.Point;
import sc.fiji.analyzeSkeleton.SkeletonResult;
import sc.fiji.analyzeSkeleton.Vertex;

/**
 * This is a wrapper to the Skeletonize and AnalyzeSkeleton plugins that allows us to get
 * the points out of the skeleton.
 *
 * @author bradleymoore
 */
public class SkeletonizeWrapper {
    // Tagged skeleton image rebuilt from the extracted paths (populated by skeletonize()).
    protected ImagePlus skeletonImage;
    // Longest-shortest-path tagged image returned by AnalyzeSkeleton_.
    protected ImagePlus lspImage;
    // One list of [x, y] points per skeleton, ordered from one end point to the other.
    protected ArrayList<ArrayList<double[]>> skeletons;
    protected ImagePlus pointTypeImage; // edges, vertices, junctions, slabs, etc
    // Per-skeleton coverage score, parallel to "skeletons" (see getScore()).
    protected ArrayList<Double> scores;

    public SkeletonizeWrapper() {
    }

    /**
     * Closes the ImagePlus objects opened during processing.
     *
     * FIX: guarded against null so that calling this before {@link #skeletonize} (or calling
     * it twice) no longer throws a NullPointerException.
     */
    public void closeAll() {
        if (skeletonImage != null) {
            skeletonImage.close();
        }
        if (lspImage != null) {
            lspImage.close();
        }
        if (pointTypeImage != null) {
            pointTypeImage.close();
        }
    }

    /**
     * Skeletonizes the given mask and extracts ordered point paths for every skeleton found.
     *
     * @param threshold Black and white image (0 and 255) where white is object and black is background.
     */
    public void skeletonize(ImagePlus threshold) {
        scores = new ArrayList<Double>();

        // if i leave the ROI, the duplicate command will just copy a bounding box
        // huzzah global variable-ish behavior!
        threshold.killRoi();

        ImagePlus tmp = new ImagePlus("", threshold.getProcessor().duplicate().convertToByte(false));//threshold.duplicate();

        Skeletonize3D_ skel = new Skeletonize3D_();
        skel.setup("", tmp);
        skel.run(tmp.getProcessor());

        AnalyzeSkeleton_ as = new AnalyzeSkeleton_();
        as.setup("", tmp);

        // longest shortest path is what we want
        // stuck in a private variable
        // also in the image
        // blarghs.

        // the skeleton won't extend to tips...
        // so, we need to...
        // 1. downsample it and fit it with splines
        // 2. extend from the tangent of the end points to the boundary
        // 3. sample along the curve and get widths

        SkeletonResult sr = as.run(AnalyzeSkeleton_.SHORTEST_BRANCH, false, true, tmp, true, false);

        lspImage = new ImagePlus(null, as.getResultImage(true).getProcessor(1));
        pointTypeImage = new ImagePlus(null, as.getResultImage(false).getProcessor(1));

        Graph[] gs = sr.getGraph();

        skeletons = new ArrayList<ArrayList<double[]>>();

        if (gs != null) {
            // we found at least one skeleton
            for (Graph g : gs) {
                // Walk each graph from an end point, collecting the ordered path.
                ArrayList<double[]> path = new ArrayList<double[]>();
                ArrayList<Vertex> vertices = g.getVertices();
                Vertex v = findEndPoint(vertices, pointTypeImage);
                addToPath(v, path, true);
                scores.add(getScore(g, path));
                skeletons.add(path);
            }
        }

        // Rebuild a skeleton image where each path point is tagged by its (1-based) index mod 255.
        skeletonImage = new ImagePlus("", threshold.getProcessor().duplicate().convertToByte(false));//threshold.duplicate();
        skeletonImage.getProcessor().threshold(255);

        for (ArrayList<double[]> path : skeletons) {
            for (int i = 0; i < path.size(); i++) {
                skeletonImage.getProcessor().set((int)path.get(i)[0], (int)path.get(i)[1], (i+1) % 255);
            }
        }

        tmp.close();
    }

    /**
     * Returns the percentage of covered points in the skeleton (i.e. more cycles lower score).
     *
     * @param g    graph of the skeleton
     * @param path ordered path extracted from the graph
     * @return fraction of the graph's points (vertices + slabs) covered by the path
     */
    protected double getScore(Graph g, ArrayList<double[]> path) {
        double total = g.getVertices().size();
        for (Edge ed : g.getEdges()) {
            total += ed.getSlabs().size();
        }
        return path.size() / total;
    }

    public ArrayList<ArrayList<double[]>> getSkeletons() {
        return skeletons;
    }

    public ArrayList<Double> getScores() {
        return scores;
    }

    public ImagePlus getSkeletonImage() {
        return skeletonImage;
    }

    // True if the [x, y] pair matches the Point's coordinates (z is ignored: 2D images).
    protected boolean pointEquals(double[] p1, Point p2) {
        return p1[0] == p2.x && p1[1] == p2.y;
    }

    protected double[] pointToDouble(Point p) {
        return new double[]{p.x, p.y};
    }

    /**
     * Reverse engineers the graph structure of AnalyzeSkeleton to return the
     * longest shortest path as a list of points in order of one end to another.
     *
     * @param v     A vertex
     * @param path  A path to build
     * @param first Whether this is the first call to this recursive function (i.e. root vertex)
     */
    protected void addToPath(Vertex v, ArrayList<double[]> path, boolean first) {
        Point p = v.getPoints().get(0);
        path.add(pointToDouble(p));

        if (first || ! isPointType(p, AnalyzeSkeleton_.END_POINT)) // check this isn't the last end point
        {
            for (Edge ed : v.getBranches()) {
                Point p1 = ed.getV1().getPoints().get(0);
                Point p2 = ed.getV2().getPoints().get(0);

                // this is somewhat messy, just check that we haven't touched this edge and
                // that it is a part of the shortest_path
                boolean doEdge = false;
                if (ed.getSlabs().size() > 0) {
                    Point s1 = ed.getSlabs().get(0);
                    doEdge = true;
                    for (int i = path.size()-1; i >= 0; i--) {
                        if (pointEquals(path.get(i), s1)) {
                            doEdge = false;
                            break;
                        }
                    }
                    doEdge = doEdge && isPointType(s1, AnalyzeSkeleton_.SHORTEST_PATH);
                }

                if (doEdge) {
                    // find out which direction we are going on this edge
                    double[] pp = pointToDouble(p);
                    boolean forward = pointEquals(pp, p1);

                    ArrayList<Point> slab = ed.getSlabs();
                    if (forward) {
                        for (int i = 0; i < slab.size(); i++) {
                            path.add(pointToDouble(slab.get(i)));
                        }
                        addToPath(ed.getV2(), path, false);
                    } else {
                        for (int i = slab.size() - 1; i >= 0; i--) {
                            path.add(pointToDouble(slab.get(i)));
                        }
                        addToPath(ed.getV1(), path, false);
                    }
                }
            }
        }
    }

    /**
     * Returns whether the given point is of the given type. Type is one of
     * AnalyzeSkeleton types.
     *
     * @param p    point to test
     * @param type AnalyzeSkeleton_ point-type constant
     * @return true if the point's tag in the relevant image equals the type
     */
    protected boolean isPointType(Point p, int type) {
        boolean ans = false;
        // SHORTEST_PATH tags live in the lsp image; all other types in the point-type image.
        if (type == AnalyzeSkeleton_.SHORTEST_PATH) {
            ans = lspImage.getProcessor().get(p.x, p.y) == type;
        } else {
            ans = pointTypeImage.getProcessor().get(p.x, p.y) == type;
        }
        return ans;
    }

    /**
     * The AnalyzeSkeleton_ plugin hasn't exposed as much of its members as it should. I'm working with
     * what I got to find the end points (even though they are already annotated as members in the Graph class.
     *
     * @param vertices
     * @param pointTypeImage Image denoting the type of each point
     * @return the first vertex containing an END_POINT-tagged point, or null if none
     */
    protected Vertex findEndPoint(ArrayList<Vertex> vertices, ImagePlus pointTypeImage) {
        Vertex ans = null;
        // (removed an unused local ImageProcessor here; isPointType() reads the image itself)
        for (Vertex v : vertices) {
            for (Point p : v.getPoints()) {
                if (isPointType(p, AnalyzeSkeleton_.END_POINT)) {
                    ans = v;
                    return ans;
                }
            }
        }
        return ans;
    }

    /**
     * Returns the longest-shortest path tagged skeleton image.
     * @return
     */
    public ImagePlus getLspImage() {
        return lspImage;
    }
}
package org.avmframework.examples.testoptimization.behaviourpair;

import org.avmframework.Vector;
import org.avmframework.objective.NumericObjectiveValue;
import org.avmframework.objective.ObjectiveFunction;
import org.avmframework.objective.ObjectiveValue;
import org.avmframework.variable.FixedPointVariable;
import org.avmframework.variable.IntegerVariable;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

/**
 * Objective function for generating test cases (behaviour pairs) from an existing state machine:
 * candidate vectors are scored by similarity to existing transitions and diversity from
 * already-generated test cases.
 */
public class GenerationObject extends ObjectiveFunction {

  private ConstraintList constraints; // Its set method uses BPair
  // The state machine the behaviour pairs are derived from.
  private StateMachine existingstatemachine;
  // Behaviour pairs parsed from existingstatemachine.
  private List<BehaviourPair> listOfExistingBehaviourPairs;
  private List<BehaviourPair> listOfExistingTestCases;
  private List<Solution> solutionsOfExistingBehaviourPairs;
  // Solution form of the already-generated test cases, used by computeObjectiveValue().
  private List<Solution> solutionsOfExistingTestCases;

  public List<BehaviourPair> getExistingTestCases() {
    return this.listOfExistingTestCases;
  }

  public void setExistingTestCases(List<BehaviourPair> existingTestCases) {
    this.listOfExistingTestCases = existingTestCases;
  }

  public void addOneTestCase2ExistingTestCases(BehaviourPair tc) {
    this.listOfExistingTestCases.add(tc);
  }

  public void setSolutionsOfExistingTestCases(List<Solution> existingTestCases) {
    this.solutionsOfExistingTestCases = existingTestCases;
  }

  public List<Solution> getSolutionsOfExistingTestCases() {
    return this.solutionsOfExistingTestCases;
  }

  public List<BehaviourPair> getListofExistingBehaviourPairs() {
    return this.listOfExistingBehaviourPairs;
  }

  /** Construct Methods. */
  public GenerationObject() {}

  public GenerationObject(StateMachine existingstatemachine) {
    this.setexistingstatemachine(existingstatemachine);
  }

  /**
   * ===== This set function is only for initial use. =====
   * ===== When updating, another function is needed, one which changes everything but the
   * constraints. =====
   */
  public void setexistingstatemachine(StateMachine existingstatemachine) {
    this.existingstatemachine = existingstatemachine;
    // Derive the behaviour pairs from the state machine's transitions.
    this.listOfExistingBehaviourPairs = BehaviourPair.stateMachineParser(this.existingstatemachine);
    System.out.println(
        "========== length listOfExistingBehaviourPairs: "
            + this.listOfExistingBehaviourPairs.size());
    // this.setConstraints();
  }

  public StateMachine getexistingstatemachine() {
    return this.existingstatemachine;
  }

  public void emptySetOfSolutionsOfExistingBehaviourPairs() {
    this.solutionsOfExistingBehaviourPairs = null;
  }

  // Converts each behaviour pair into its Solution representation.
  public void fillSetofSolutionsOfExistingBehaviourPairs(
      List<BehaviourPair> existingBehaviourPairs) {
    this.solutionsOfExistingBehaviourPairs = new ArrayList<Solution>();
    int len = existingBehaviourPairs.size();
    for (int i = 0; i < len; i++) {
      Solution temp = transformBPair2Solution(existingBehaviourPairs.get(i));
      this.solutionsOfExistingBehaviourPairs.add(temp);
    }
  }

  public void initialSetOfExistingTestCases() {
    // EmptySetOfExistingTestCases();
    this.solutionsOfExistingTestCases = new ArrayList<Solution>();
  }

  // Used when reading BehaviourPairs from files.
  public Solution transformBPair2Solution(BehaviourPair bpair) {
    Solution sol = new Solution();

    // Source State: encoded as the index of the matching state in the state machine.
    State sourceState = bpair.getSourceState();
    List<State> allexistingstates = this.existingstatemachine.getAllStates();
    int len = allexistingstates.size();
    for (int i = 0; i < len; i++) {
      State currentState = allexistingstates.get(i);
      if (currentState.getStateName().equals(sourceState.getStateName())) {
        sol.addSolutionMember("SourceState", i);
        break;
      }
    }

    // Target State
    /** ### Need to consider whether the type is the correct type! ###. */
    State targetState = bpair.getTargetState();
    sol.addSolutionMember("activecall", targetState.getSystemVariables().get("activecall"));
    sol.addSolutionMember("videoquality", targetState.getSystemVariables().get("videoquality"));
    return sol;
  }

  /*
   * When generating test cases(BPs here, actually), traverse every constraint
   * and pick up one value.
   * type:
   * 0-INTEGER_CATEGORICAL_TYPE
   * 1-DOUBLE_CATEGORICAL_TYPE(impossible type)
   * 2-INTEGER_NUMERICAL_TYPE
   * 3-DOUBLE_NUMERICAL_TYPE
   *
   */
  // Builds the search vector: one variable per encoded test-case component.
  // NOTE(review): the "size" parameter is unused — presumably the vector layout is fixed; confirm.
  public Vector setUpVector(int size) {
    Vector vector = new Vector();
    // source state label
    vector.addVariable(
        new IntegerVariable(0, 0, this.existingstatemachine.getAllStates().size() - 1));
    // target state - activecall
    vector.addVariable(new IntegerVariable(0, 0, 3));
    // target state - videoquality
    vector.addVariable(new IntegerVariable(0, 0, 3));
    // user operation
    vector.addVariable(new IntegerVariable(0, 0, 2));
    // network condition - Packet Loss
    vector.addVariable(new FixedPointVariable(0, 1, 0, 100));
    // network condition - Delay
    vector.addVariable(new FixedPointVariable(0, 1, 0, 100));
    // network condition - Packet Duplication
    vector.addVariable(new FixedPointVariable(0, 1, 0, 100));
    // network condition - Packet Corruption
    vector.addVariable(new FixedPointVariable(0, 1, 0, 100));
    return vector;
  }

  public void setConstraints() {
    // TODO Auto-generated method stub
    // Scheme: from BehaviourPair to Problem Constraints
    this.constraints = new ConstraintList();
    this.constraints.addOneConstraint(
        "SourceState", 0, this.existingstatemachine.getAllStates().size() - 1, 0);
    // Target State(search variables)
    this.constraints.addOneConstraint("activecall", 0, 3, 2);
    this.constraints.addOneConstraint("videoquality", 0, 3, 2);
    this.constraints.addOneConstraint("UserOperation", 0, 2, 0); // 0-null, 1-dial, 2-disconnect
    this.constraints.addOneConstraint("PacketLoss", 0, 100, 3);
    this.constraints.addOneConstraint("PacketDelay", 0, 100, 3);
    this.constraints.addOneConstraint("PacketDuplication", 0, 100, 3);
    this.constraints.addOneConstraint("PacketCorruption", 0, 100, 3);
  }

  public ConstraintList getConstraints() {
    // TODO Auto-generated method stub
    return this.constraints;
  }

  // Lower is better: combines (normalized) similarity to existing transitions with
  // (normalized) diversity from already-generated test cases.
  public ObjectiveValue computeObjectiveValue(Vector vector) {
    // first, the similarity of the current solution with the existing behavior pairs which are from
    // state machine.
    double resultSimilarity = 0;
    List<Transition> listTransition = this.getexistingstatemachine().getAllTransitions();
    int len1 = listTransition.size();
    int clen1 = len1;
    for (int i = 0; i < len1; i++) {
      double tempp = similarityBetweenVectorAndTransition(vector, listTransition.get(i));
      if (tempp == 0) {
        clen1--;
      } else {
        resultSimilarity = resultSimilarity + tempp;
      }
    }
    // NOTE(review): clen1 can reach 0 when every similarity is 0, which divides by zero — confirm.
    resultSimilarity = resultSimilarity / clen1;

    // second, diversity with the existing solutions
    // Using the solutions of solutionsOfExistingTestCases directly.
    // NOTE(review): len2 == 0 before any test case exists also divides by zero — confirm callers
    // always invoke initialSetOfExistingTestCases()/add at least one case first.
    int len2 = this.solutionsOfExistingTestCases.size();
    double resultDiversity = 0;
    double result = 0;
    for (int i = 0; i < len2; i++) {
      resultDiversity =
          resultDiversity + diversityOfTwoVectors(vector, this.solutionsOfExistingTestCases.get(i));
    }
    resultDiversity = resultDiversity / len2;

    result = maxMinNormalization(resultDiversity, 0, 3) + maxMinNormalization(resultSimilarity, 0, 3);
    result = 1 - result / 2;
    return NumericObjectiveValue.lowerIsBetterObjectiveValue(result, 0);
  }

  // Maps a non-negative value into [0, 1) via x / (x + 1).
  public static double normalization(double num) {
    double nor = num / (num + 1);
    return nor;
  }

  // Standard max-min normalization of num into [0, 1] given [min, max].
  public static double maxMinNormalization(double num, double min, double max) {
    double nor = (num - min) / (max - min);
    return nor;
  }

  /** ValueSet.
first version */
    public double variableConstraintConstraintDistance(ValueSet vc, ValueSet bpc) {
        // Distance between two constraint sets. Each constraint string is
        // "var op value" (space separated); a single "==" constraint is a point,
        // a pair of "<"/">" constraints is an interval reduced to its midpoint.
        int lengthValuesc = vc.getValueSet().size();
        int lbpc = bpc.getValueSet().size();
        double distance = 0;
        if (lengthValuesc == 1 && lbpc == 1) {
            // point vs point
            String[] srcs0 = vc.getValueSet().get(0).split(" ");
            String[] srcs1 = bpc.getValueSet().get(0).split(" ");
            if (srcs0[1].equals("==") && srcs1[1].equals("==")) {
                int temp0 = Integer.parseInt(srcs0[2]);
                int temp1 = Integer.parseInt(srcs1[2]);
                distance = normalization((double) temp0) - normalization((double) temp1);
            } else {
                System.out.println("Warning: Illegal!");
            }
        } else if (lengthValuesc == 1 && lbpc == 2) {
            // point vs interval midpoint
            String[] srcs00 = vc.getValueSet().get(0).split(" ");
            String[] srcs10 = bpc.getValueSet().get(0).split(" ");
            String[] srcs11 = bpc.getValueSet().get(1).split(" ");
            int temp00 = 0;
            double mid1 = 1;
            if (srcs00[1].equals("==")) {
                temp00 = Integer.parseInt(srcs00[2]);
            } else {
                System.out.println("Warning: Illegal!");
            }
            if (isLessOp(srcs10[1]) && isGreaterOp(srcs11[1])) {
                int temp11 = Integer.parseInt(srcs10[2]);
                int temp10 = Integer.parseInt(srcs11[2]);
                mid1 = ((double) temp11 - (double) temp10) / 2 + (double) temp10;
            } else if (isGreaterOp(srcs10[1]) && isLessOp(srcs11[1])) {
                int temp11 = Integer.parseInt(srcs11[2]);
                int temp10 = Integer.parseInt(srcs10[2]);
                mid1 = ((double) temp11 - (double) temp10) / 2 + (double) temp10;
            } else {
                System.out.println("Warning: Illegal!");
            }
            distance = normalization((double) temp00) - normalization(mid1);
        } else if (lengthValuesc == 2 && lbpc == 1) {
            // interval midpoint vs point
            String[] srcs00 = vc.getValueSet().get(0).split(" ");
            String[] srcs01 = vc.getValueSet().get(1).split(" ");
            String[] srcs11 = bpc.getValueSet().get(0).split(" ");
            int temp11 = 0;
            double mid0 = 1;
            if (isLessOp(srcs00[1]) && isGreaterOp(srcs01[1])) {
                int temp01 = Integer.parseInt(srcs00[2]);
                int temp00 = Integer.parseInt(srcs01[2]);
                mid0 = ((double) temp01 - (double) temp00) / 2 + (double) temp00;
            } else if (isGreaterOp(srcs00[1]) && isLessOp(srcs01[1])) {
                int temp01 = Integer.parseInt(srcs01[2]);
                int temp00 = Integer.parseInt(srcs00[2]);
                mid0 = ((double) temp01 - (double) temp00) / 2 + (double) temp00;
            } else {
                System.out.println("Warning: Illegal!");
            }
            if (srcs11[1].equals("==")) {
                temp11 = Integer.parseInt(srcs11[2]);
            } else {
                System.out.println("Warning: Illegal!");
            }
            distance = normalization(mid0) - normalization((double) temp11);
        } else if (lengthValuesc == 2 && lbpc == 2) {
            // interval midpoint vs interval midpoint
            String[] srcs00 = vc.getValueSet().get(0).split(" ");
            String[] srcs01 = vc.getValueSet().get(1).split(" ");
            String[] srcs10 = bpc.getValueSet().get(0).split(" ");
            String[] srcs11 = bpc.getValueSet().get(1).split(" ");
            double mid0 = 0;
            double mid1 = 1;
            if (isLessOp(srcs00[1]) && isGreaterOp(srcs01[1])) {
                int temp01 = Integer.parseInt(srcs00[2]);
                int temp00 = Integer.parseInt(srcs01[2]);
                mid0 = ((double) temp01 - (double) temp00) / 2 + (double) temp00;
            } else if (isGreaterOp(srcs00[1]) && isLessOp(srcs01[1])) {
                int temp01 = Integer.parseInt(srcs01[2]);
                int temp00 = Integer.parseInt(srcs00[2]);
                mid0 = ((double) temp01 - (double) temp00) / 2 + (double) temp00;
            } else {
                System.out.println("Warning: Illegal!");
            }
            if (isLessOp(srcs10[1]) && isGreaterOp(srcs11[1])) {
                int temp11 = Integer.parseInt(srcs10[2]);
                int temp10 = Integer.parseInt(srcs11[2]);
                mid1 = ((double) temp11 - (double) temp10) / 2 + (double) temp10;
            } else if (isGreaterOp(srcs10[1]) && isLessOp(srcs11[1])) {
                int temp11 = Integer.parseInt(srcs11[2]);
                int temp10 = Integer.parseInt(srcs10[2]);
                mid1 = ((double) temp11 - (double) temp10) / 2 + (double) temp10;
            } else {
                System.out.println("Warning: Illegal!");
            }
            // NOTE(review): orientation (bpc - vc) is the reverse of the point/point
            // case above (vc - bpc); preserved as-is — TODO confirm intended.
            distance = normalization(mid1) - normalization(mid0);
        } else {
            System.out.println("Warning: Illegal constraints!");
        }
        return distance;
    }

    /**
     * Distance between a concrete variable value and a constraint set (point or
     * interval). Inequality constraints on a single-element set are not supported.
     */
    public double variableValueConstraintDistance(int vv, ValueSet bpc) {
        int length = bpc.getValueSet().size();
        double distance = 0;
        if (length == 1) {
            String[] srcs = bpc.getValueSet().get(0).split(" ");
            if (srcs[1].equals("==")) {
                int tempv1 = Integer.parseInt(srcs[2]);
                distance = normalization((double) vv) - normalization((double) tempv1);
            } else if (isLessOp(srcs[1]) || isGreaterOp(srcs[1])) {
                System.out.println("For now, Illegal constraint.");
            } else {
                System.out.println("Warning: Illegal constraint!");
            }
        } else if (length == 2) {
            // Only considering part of situation: the pair forms [tempv1, tempv2].
            String con0 = bpc.getValueSet().get(0);
            String con1 = bpc.getValueSet().get(1);
            int tempv1 = 0; // lower bound
            int tempv2 = 0; // upper bound
            String[] srcs0 = con0.split(" ");
            if (srcs0[1].equals("==")) {
                System.out.println("For now, Illegal constraint.");
            } else if (isLessOp(srcs0[1])) {
                tempv2 = Integer.parseInt(srcs0[2]);
            } else if (isGreaterOp(srcs0[1])) {
                tempv1 = Integer.parseInt(srcs0[2]);
            } else {
                System.out.println("Warning: Illegal constraint!");
            }
            String[] srcs1 = con1.split(" ");
            if (srcs1[1].equals("==")) {
                System.out.println("For now, Illegal constraint.");
            } else if (isLessOp(srcs1[1])) {
                // FIX: previously parsed srcs0[2] (copy-paste bug) — the second
                // constraint's bound was silently taken from the first constraint.
                tempv2 = Integer.parseInt(srcs1[2]);
            } else if (isGreaterOp(srcs1[1])) {
                tempv1 = Integer.parseInt(srcs1[2]); // FIX: was srcs0[2]
            } else {
                System.out.println("Warning: Illegal constraint!");
            }
            double mid = ((double) tempv2 - (double) tempv1) / 2 + (double) tempv1;
            distance = normalization((double) vv) - normalization(mid);
        } else {
            System.out.println("Warning: illegal ValueSet!");
        }
        return distance;
    }

    /** 201606 ValueSet the second version. */
    public double diversityOfTwoVectors(Vector v1, Solution v2) {
        // Distance of two solutions: map all variables into (0, 1), then apply the
        // Euclidean metric across the nine dimensions.
        HashMap<String, Object> solutionv2 = v2.getsolution();

        // v1: source state (index into the existing state machine's state list).
        State sourceStatev1 =
            this.existingstatemachine
                .getAllStates()
                .get(Integer.parseInt(v1.getVariable(0).toString()));
        // v2: source state.
        State sourceStatev2 =
            this.existingstatemachine.getAllStates().get((int) solutionv2.get("SourceState"));
        double distanceSourceActiveCall =
            variableCalculateConstraintDistancev201606(
                sourceStatev1.getSystemVariablesValueSet().get("activecall"),
                sourceStatev2.getSystemVariablesValueSet().get("activecall"));
        double distanceSourceVideoQuality =
            variableCalculateConstraintDistancev201606(
                sourceStatev1.getSystemVariablesValueSet().get("videoquality"),
                sourceStatev2.getSystemVariablesValueSet().get("videoquality"));

        // v1: target state, user operation, network environment.
        int activeCallTargetStatev1 = Integer.parseInt(v1.getVariable(1).toString());
        int videoQualityTargetStatev1 = Integer.parseInt(v1.getVariable(2).toString());
        double norActiveCallTargetStatev1 = normalization((double) activeCallTargetStatev1);
        double norVideoQualityTargetStatev1 = normalization((double) videoQualityTargetStatev1);
        int userOperationv1 = Integer.parseInt(v1.getVariable(3).toString());
        double norUserOperationv1 = normalization((double) userOperationv1);
        double packetLossv1 = Double.parseDouble(v1.getVariable(4).toString());
        double packetDelayv1 = Double.parseDouble(v1.getVariable(5).toString());
        double packetDuplicationv1 = Double.parseDouble(v1.getVariable(6).toString());
        double packetCorruptionv1 = Double.parseDouble(v1.getVariable(7).toString());
        double norPacketLossv1 = normalization(packetLossv1);
        double norPacketDelayv1 = normalization(packetDelayv1);
        double norPacketDuplicationv1 = normalization(packetDuplicationv1);
        double norPacketCorruptionv1 = normalization(packetCorruptionv1);

        // v2: target state, user operation, network environment.
        int activeCallTargetStatev2 = (int) solutionv2.get("activecall");
        int videoQualityTargetStatev2 = (int) solutionv2.get("videoquality");
        double norActiveCallTargetStatev2 = normalization((double) activeCallTargetStatev2);
        double norVideoQualityTargetStatev2 = normalization((double) videoQualityTargetStatev2);
        int userOperationv2 = (int) solutionv2.get("UserOperation");
        double norUserOperationv2 = normalization((double) userOperationv2);
        double packetLossv2 = (double) solutionv2.get("PacketLoss");
        double packetDelayv2 = (double) solutionv2.get("PacketDelay");
        double packetDuplicationv2 = (double) solutionv2.get("PacketDuplication");
        double packetCorruptionv2 = (double) solutionv2.get("PacketCorruption");
        double norPacketLossv2 = normalization(packetLossv2);
        double norPacketDelayv2 = normalization(packetDelayv2);
        double norPacketDuplicationv2 = normalization(packetDuplicationv2);
        double norPacketCorruptionv2 = normalization(packetCorruptionv2);

        // Euclidean metric over all nine dimensions.
        double result =
            Math.pow(normalization(distanceSourceActiveCall), 2)
                + Math.pow(normalization(distanceSourceVideoQuality), 2)
                + Math.pow((norActiveCallTargetStatev1 - norActiveCallTargetStatev2), 2)
                + Math.pow((norVideoQualityTargetStatev1 - norVideoQualityTargetStatev2), 2)
                + Math.pow((norUserOperationv1 - norUserOperationv2), 2)
                + Math.pow((norPacketLossv1 - norPacketLossv2), 2)
                + Math.pow((norPacketDelayv1 - norPacketDelayv2), 2)
                + Math.pow((norPacketDuplicationv1 - norPacketDuplicationv2), 2)
                + Math.pow((norPacketCorruptionv1 - norPacketCorruptionv2), 2);
        return Math.sqrt(result);
    }

    /**
     * Similarity is the complement of the distance within the 9-variable space,
     * whose maximum distance is 3 (sqrt of nine squared unit differences).
     */
    public double similarityBetweenVectorAndTransition(Vector v1, Transition t1) {
        return 3 - distanceBetweenVectorAndTransition(v1, t1);
    }

    /**
     * Euclidean distance between a candidate vector and one transition of the
     * existing state machine, with domain-specific penalty filters that force
     * uninteresting candidates to the maximum distance 3.0.
     */
    public double distanceBetweenVectorAndTransition(Vector v1, Transition t1) {
        // Distances of the source states.
        State sourceStatev1 =
            this.existingstatemachine
                .getAllStates()
                .get(Integer.parseInt(v1.getVariable(0).toString()));
        double distanceSourceActiveCall =
            variableCalculateConstraintDistancev201606(
                sourceStatev1.getSystemVariablesValueSet().get("activecall"),
                t1.getSourceState().getSystemVariablesValueSet().get("activecall"));
        double distanceSourceVideoQuality =
            variableCalculateConstraintDistancev201606(
                sourceStatev1.getSystemVariablesValueSet().get("videoquality"),
                t1.getSourceState().getSystemVariablesValueSet().get("videoquality"));
        double norDistanceSourceActiveCall = normalization(distanceSourceActiveCall);
        double norDistanceSourceVideoQuality = normalization(distanceSourceVideoQuality);

        // Distances of the target states.
        int activeCallTargetStatev1 = Integer.parseInt(v1.getVariable(1).toString());
        int videoQualityTargetStatev1 = Integer.parseInt(v1.getVariable(2).toString());
        ValueSet activeCallTargetStateT =
            t1.getTargetState().getSystemVariablesValueSet().get("activecall");
        ValueSet videoQualityTargetStateT =
            t1.getTargetState().getSystemVariablesValueSet().get("videoquality");
        double distanceTargetActiveCall =
            variableCalculateConstraintDistancev201606(activeCallTargetStatev1, activeCallTargetStateT);
        double distanceTargetVideoQuality =
            variableCalculateConstraintDistancev201606(videoQualityTargetStatev1, videoQualityTargetStateT);
        double norDistanceTargetActiveCall = normalization(distanceTargetActiveCall);
        double norDistanceTargetVideoQuality = normalization(distanceTargetVideoQuality);

        // Distances of the triggers (user operation: 0-null, 1-dial, 2-disconnect).
        int userOperationv1 = Integer.parseInt(v1.getVariable(3).toString());
        double norUserOperationv1 = normalization((double) userOperationv1);
        int userOperationBehaviourPair;
        String userOperationStringBehaviourPair = t1.getTriggers().get(0);
        if (userOperationStringBehaviourPair.equals("null")) {
            userOperationBehaviourPair = 0;
        } else if (userOperationStringBehaviourPair.equals("dial")) {
            userOperationBehaviourPair = 1;
        } else if (userOperationStringBehaviourPair.equals("disconnect")) {
            userOperationBehaviourPair = 2;
        } else {
            userOperationBehaviourPair = -1;
            System.out.println("***** Warning: calculate triggers! *****");
        }
        double norUserOperationBehaviourPair = normalization((double) userOperationBehaviourPair);
        double distanceTrigger = norUserOperationBehaviourPair - norUserOperationv1;

        // Distances of the network environment.
        double packetLossv1 = Double.parseDouble(v1.getVariable(4).toString());
        double packetDelayv1 = Double.parseDouble(v1.getVariable(5).toString());
        double packetDuplicationv1 = Double.parseDouble(v1.getVariable(6).toString());
        double packetCorruptionv1 = Double.parseDouble(v1.getVariable(7).toString());
        ValueSet packetLossT = t1.getConditions().get("PacketLoss");
        ValueSet packetDelayT = t1.getConditions().get("PacketDelay");
        ValueSet packetDuplicationT = t1.getConditions().get("PacketDuplication");
        ValueSet packetCorruptionT = t1.getConditions().get("PacketCorruption");
        double distancePacketLoss =
            variableCalculateConstraintDistancev201606(packetLossv1, packetLossT);
        double distancePacketDelay =
            variableCalculateConstraintDistancev201606(packetDelayv1, packetDelayT);
        double distancePacketDuplication =
            variableCalculateConstraintDistancev201606(packetDuplicationv1, packetDuplicationT);
        double distancePacketCorruption =
            variableCalculateConstraintDistancev201606(packetCorruptionv1, packetCorruptionT);
        double norDistancePacketLoss = normalization(distancePacketLoss);
        double norDistancePacketDelay = normalization(distancePacketDelay);
        double norDistancePacketDuplication = normalization(distancePacketDuplication);
        double norDistancePacketCorruption = normalization(distancePacketCorruption);

        double result =
            Math.pow(norDistanceSourceActiveCall, 2)
                + Math.pow(norDistanceSourceVideoQuality, 2)
                + Math.pow(norDistanceTargetActiveCall, 2)
                + Math.pow(norDistanceTargetVideoQuality, 2)
                + Math.pow(distanceTrigger, 2)
                + Math.pow(norDistancePacketLoss, 2)
                + Math.pow(norDistancePacketDelay, 2)
                + Math.pow(norDistancePacketDuplication, 2)
                + Math.pow(norDistancePacketCorruption, 2);
        result = Math.sqrt(result);

        // Penalty filters: candidates with mild network conditions or near-duplicate
        // conditions are pushed to the maximum distance. NOTE(review): the v201606
        // distances appear to lie in [0, 1], so the "< 1"/"< 20" guards would fire
        // for almost any input — preserved as-is, TODO confirm intended thresholds.
        if (packetLossv1 < 10 || packetDelayv1 < 1 || packetDuplicationv1 < 1
            || packetCorruptionv1 < 1) {
            return 3.0;
        }
        if (distancePacketLoss < 1 || distancePacketDelay < 1 || distancePacketDuplication < 1
            || distancePacketCorruption < 1) {
            return 3.0;
        }
        if (distancePacketLoss + distancePacketDelay + distancePacketDuplication
            + distancePacketCorruption < 20) {
            return 3.0;
        }
        return result;
    }

    /**
     * Distance between a concrete (already numeric) value and a constraint set:
     * 0 when the value satisfies the bounds, otherwise the gap to the nearest
     * bound; -1 on any unsupported constraint combination.
     */
    public double variableCalculateConstraintDistancev201606(double vv, ValueSet bpc) {
        if (bpc == null) {
            System.out.println("bpc == null");
            // FIX: previously fell through and threw a NullPointerException; return
            // the same -1 sentinel used by every other error path.
            return -1;
        }
        if (bpc.getValueSet().isEmpty()) {
            System.out.println("bpc.getValueSet().isEmpty()");
        }
        int length = bpc.getValueSet().size();
        List<ValueSetConstraint> bpcList = new ArrayList<ValueSetConstraint>();
        for (int i = 0; i < length; i++) {
            bpcList.add(parseConstraint(bpc.getValueSet().get(i)));
        }

        int numOfLessThan = 0;
        int numOfGreaterThan = 0;
        int numOfEqualTo = 0;
        for (int i = 0; i < length; i++) {
            if (bpcList.get(i).operator.equals("==")) {
                numOfEqualTo++;
            } else if (isLessOp(bpcList.get(i).operator)) {
                numOfLessThan++;
            } else if (isGreaterOp(bpcList.get(i).operator)) {
                numOfGreaterThan++;
            } else {
                System.out.println("***** Error: Wrong operator! --- *****");
                return -1;
            }
        }
        // More than one bound of the same direction is unsupported (the original
        // also listed 1-and->=2 combinations, which this condition subsumes).
        if (numOfLessThan >= 2 || numOfGreaterThan >= 2) {
            System.out.println("***** Warning: Unsolved situation 1! *****");
            return -1;
        }

        double distance = 0;
        vv = normalization(vv);
        // NOTE(review): below, raw bounds ('scrop') are compared against the
        // normalized vv but normalized only inside the subtraction — looks
        // inconsistent; preserved as-is. TODO confirm intended.
        if (numOfLessThan == 0 && numOfGreaterThan == 0) {
            if (numOfEqualTo == 1) {
                distance = vv - normalization(Double.parseDouble(bpcList.get(0).varRight));
                if (distance < 0) {
                    distance = distance * (-1);
                }
            } else {
                System.out.println("***** Warning: Some Impossible situation! *****");
                return -1;
            }
        } else if (numOfLessThan == 0 && numOfGreaterThan == 1) {
            double scrop = 0;
            for (int i = 0; i < length; i++) {
                if (isGreaterOp(bpcList.get(i).operator)) {
                    scrop = Double.parseDouble(bpcList.get(i).varRight);
                }
            }
            if (vv >= scrop) {
                distance = 0;
            } else {
                distance = normalization(scrop) - vv;
            }
        } else if (numOfLessThan == 1 && numOfGreaterThan == 0) {
            double scrop = 0;
            for (int i = 0; i < length; i++) {
                if (isLessOp(bpcList.get(i).operator)) {
                    scrop = Double.parseDouble(bpcList.get(i).varRight);
                }
            }
            if (vv <= scrop) {
                distance = 0;
            } else {
                distance = vv - normalization(scrop);
            }
        } else if (numOfLessThan == 1 && numOfGreaterThan == 1) {
            double scropG = 0;
            double scropL = 0;
            for (int i = 0; i < length; i++) {
                if (isGreaterOp(bpcList.get(i).operator)) {
                    scropG = Double.parseDouble(bpcList.get(i).varRight);
                }
                if (isLessOp(bpcList.get(i).operator)) {
                    scropL = Double.parseDouble(bpcList.get(i).varRight);
                }
            }
            if (scropG <= scropL) {
                // Ordinary interval [scropG, scropL].
                if (vv <= scropL && vv >= scropG) {
                    distance = 0;
                } else if (vv <= scropL && vv <= scropG) {
                    distance = normalization(scropG) - vv;
                } else if (vv >= scropL && vv >= scropG) {
                    distance = vv - normalization(scropL);
                } else {
                    System.out.println("***** Warning: Impossible situation! *****");
                    return -1;
                }
            } else {
                // Disjoint "outside" region: satisfied above scropG or below scropL.
                if (vv >= scropG || vv <= scropL) {
                    distance = 0;
                } else {
                    double t1 = normalization(scropG) - vv;
                    double t2 = vv - normalization(scropL);
                    distance = (t1 >= t2) ? t2 : t1;
                }
            }
        } else {
            System.out.println("***** Warning: Unsolved situation 2! *****");
            return -1;
        }
        return distance;
    }

    /**
     * Distance between two constraint sets: 0 when they describe exactly the same
     * bounds, 1 when they differ (or have different sizes), -1 on unsupported input.
     */
    public double variableCalculateConstraintDistancev201606(ValueSet vv, ValueSet bpc) {
        int lengthValues = vv.getValueSet().size();
        int lbp = bpc.getValueSet().size();
        List<ValueSetConstraint> bpcListV = new ArrayList<ValueSetConstraint>();
        List<ValueSetConstraint> bpcListBehaviourPair = new ArrayList<ValueSetConstraint>();
        for (int i = 0; i < lengthValues; i++) {
            bpcListV.add(parseConstraint(vv.getValueSet().get(i)));
        }
        for (int i = 0; i < lbp; i++) {
            bpcListBehaviourPair.add(parseConstraint(bpc.getValueSet().get(i)));
        }

        int numOfLessThanV = 0;
        int numOfGreaterThanV = 0;
        int numOfEqualToV = 0;
        int numOfLessThanBehaviourPair = 0;
        int numOfGreaterThanBehaviourPair = 0;
        int numOfEqualToBehaviourPair = 0;
        for (int i = 0; i < lengthValues; i++) {
            if (bpcListV.get(i).operator.equals("==")) {
                numOfEqualToV++;
            } else if (isLessOp(bpcListV.get(i).operator)) {
                numOfLessThanV++;
            } else if (isGreaterOp(bpcListV.get(i).operator)) {
                numOfGreaterThanV++;
            } else {
                System.out.println("***** Error: Wrong operator1! *****");
                return -1;
            }
        }
        for (int i = 0; i < lbp; i++) {
            if (bpcListBehaviourPair.get(i).operator.equals("==")) {
                numOfEqualToBehaviourPair++;
            } else if (isLessOp(bpcListBehaviourPair.get(i).operator)) {
                numOfLessThanBehaviourPair++;
            } else if (isGreaterOp(bpcListBehaviourPair.get(i).operator)) {
                numOfGreaterThanBehaviourPair++;
            } else {
                System.out.println("***** Error: Wrong operator2! *****");
                return -1;
            }
        }
        if (numOfLessThanV >= 2 || numOfGreaterThanV >= 2
            || numOfLessThanBehaviourPair >= 2 || numOfGreaterThanBehaviourPair >= 2) {
            System.out.println("***** Warning: Unsolved situation 3! *****");
            return -1;
        }
        // Different cardinalities never match.
        if (lengthValues != lbp) {
            return 1;
        }

        double distance = 0;
        if (numOfLessThanV == numOfLessThanBehaviourPair
            && numOfGreaterThanV == numOfGreaterThanBehaviourPair) {
            if (numOfLessThanV == 0 && numOfGreaterThanV == 0) {
                if (numOfEqualToV == 1 && numOfEqualToBehaviourPair == 1) {
                    if (Double.parseDouble(bpcListV.get(0).varRight)
                        == Double.parseDouble(bpcListBehaviourPair.get(0).varRight)) {
                        distance = 0;
                    } else {
                        distance = 1;
                    }
                } else {
                    System.out.println("***** Warning: Unsolved situation 5! *****");
                    return -1;
                }
            } else if (numOfLessThanV == 1 && numOfGreaterThanV == 0) {
                double scropV = 0;
                double scropBehaviourPair = 0;
                for (int i = 0; i < lengthValues; i++) {
                    if (isLessOp(bpcListV.get(i).operator)) {
                        scropV = Double.parseDouble(bpcListV.get(i).varRight);
                    }
                }
                for (int i = 0; i < lbp; i++) {
                    if (isLessOp(bpcListBehaviourPair.get(i).operator)) {
                        scropBehaviourPair = Double.parseDouble(bpcListBehaviourPair.get(i).varRight);
                    }
                }
                distance = (scropV == scropBehaviourPair) ? 0 : 1;
            } else if (numOfLessThanV == 0 && numOfGreaterThanV == 1) {
                double scropV = 0;
                double scropBehaviourPair = 0;
                for (int i = 0; i < lengthValues; i++) {
                    if (isGreaterOp(bpcListV.get(i).operator)) {
                        scropV = Double.parseDouble(bpcListV.get(i).varRight);
                    }
                }
                for (int i = 0; i < lbp; i++) {
                    if (isGreaterOp(bpcListBehaviourPair.get(i).operator)) {
                        scropBehaviourPair = Double.parseDouble(bpcListBehaviourPair.get(i).varRight);
                    }
                }
                distance = (scropV == scropBehaviourPair) ? 0 : 1;
            } else if (numOfLessThanV == 1 && numOfGreaterThanV == 1) {
                double scropGreaterThanV = 0;
                double scropLessThanV = 0;
                double scropGreaterThanBehaviourPair = 0;
                double scropLessThanBehaviourPair = 0;
                for (int i = 0; i < lengthValues; i++) {
                    if (isLessOp(bpcListV.get(i).operator)) {
                        scropLessThanV = Double.parseDouble(bpcListV.get(i).varRight);
                    }
                    if (isGreaterOp(bpcListV.get(i).operator)) {
                        scropGreaterThanV = Double.parseDouble(bpcListV.get(i).varRight);
                    }
                }
                for (int i = 0; i < lbp; i++) {
                    if (isLessOp(bpcListBehaviourPair.get(i).operator)) {
                        scropLessThanBehaviourPair =
                            Double.parseDouble(bpcListBehaviourPair.get(i).varRight);
                    }
                    if (isGreaterOp(bpcListBehaviourPair.get(i).operator)) {
                        scropGreaterThanBehaviourPair =
                            Double.parseDouble(bpcListBehaviourPair.get(i).varRight);
                    }
                }
                if (scropLessThanV == scropLessThanBehaviourPair
                    && scropGreaterThanV == scropGreaterThanBehaviourPair) {
                    distance = 0;
                } else {
                    distance = 1;
                }
            } else {
                System.out.println("***** Warning: Unsolved situation 6! *****");
                return -1;
            }
        } else {
            System.out.println("***** Warning: Unsolved situation 7! *****");
            return -1;
        }
        return distance;
    }

    /** True when the operator is "&lt;" or "&lt;=". */
    private static boolean isLessOp(String op) {
        return op.equals("<") || op.equals("<=");
    }

    /** True when the operator is "&gt;" or "&gt;=". */
    private static boolean isGreaterOp(String op) {
        return op.equals(">") || op.equals(">=");
    }

    /** Splits a "var op value" constraint string into its three parts. */
    private static ValueSetConstraint parseConstraint(String constraint) {
        String[] srcs = constraint.split(" ");
        ValueSetConstraint temp = new ValueSetConstraint();
        temp.varLeft = srcs[0];
        temp.operator = srcs[1];
        temp.varRight = srcs[2];
        return temp;
    }

    /** Parsed "var op value" constraint triple (made static: no outer-instance state is used). */
    static class ValueSetConstraint {
        String varLeft;
        String operator;
        String varRight;
    }
}
/*****************************************************************************
 *
 * AIRBIQUITY PROPRIETARY INFORMATION
 *
 * The information contained herein is proprietary to Airbiquity
 * and shall not be reproduced or disclosed in whole or in part
 * or used for any design or manufacture
 * without direct written authorization from Airbiquity.
 *
 * Copyright (c) 2012 by Airbiquity. All rights reserved.
 *
 *****************************************************************************/
package com.airbiquity.mcs.iostream;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import android.bluetooth.BluetoothSocket;
import android.util.Log;

import com.airbiquity.mcs.common.AbstractMcsLayer;

/**
 * An McsLayer that uses a Bluetooth socket for data transfer.
 *
 * <p>A {@code ReadThread} pulls bytes from the socket into an internal buffer;
 * a {@code NotificationThread} polls that buffer and notifies the upper layer
 * once per batch. All shared state (streams, buffer counters, notification
 * flag) is guarded by the {@code BtLayer} instance monitor.
 */
public class BtLayer extends AbstractMcsLayer {

    private static final String TAG = "BtLayer";

    private InputStream m_input = null;
    private OutputStream m_output = null;
    private boolean isNotificationSent = false;
    private byte[] buf;
    private boolean isCanceled = false;

    /** Number of bytes we have read into 'buf' but didn't write out yet.
     *  We don't read anything from the input stream until this is reset 0 again. */
    private int m_available = 0;

    /** Number of bytes we have read from 'buf' and wrote out. */
    private int m_read = 0;

    private Thread readThread;
    private Thread notificationThread;
    private BluetoothSocket btSocket;

    /**
     * Constructor. Allocates the 16 KiB transfer buffer; threads are started
     * later by {@link #openSocket(BluetoothSocket)}.
     */
    public BtLayer() {
        buf = new byte[16 * 1024];
    }

    /**
     * Sets the Bluetooth socket, opens its streams, and starts the worker threads.
     *
     * @param socket the connected Bluetooth socket to use.
     */
    synchronized public void openSocket(BluetoothSocket socket) {
        try {
            btSocket = socket;
            m_input = btSocket.getInputStream();
            m_output = btSocket.getOutputStream();
            notificationThread = new NotificationThread();
            readThread = new ReadThread();
            notificationThread.start();
            readThread.start();
        } catch (IOException e) {
            Log.e(TAG, "", e);
        }
    }

    /**
     * Closes the internal Bluetooth socket and shuts this layer down.
     */
    synchronized public void closeSocket() {
        try {
            btSocket.close();
            close();
            btSocket = null;
        } catch (IOException e) {
            Log.e(TAG, "", e);
        }
    }

    /**
     * Checks if the BT socket is open.
     *
     * @return true if so.
     */
    synchronized public boolean isOpen() {
        return (btSocket != null);
    }

    /**
     * Reads data from this layer. (Called from the upper layer.)
     *
     * @param buffer buffer where to put the data.
     * @param size   size of the buffer.
     * @return number of bytes written into the buffer, 0 if none available.
     */
    public int readData(byte[] buffer, int size) {
        synchronized (this) {
            isNotificationSent = false;
            if (m_input != null) {
                if (m_available < 1) {
                    return 0;
                }
                int bytesToRead = m_available - m_read;
                if (bytesToRead > size) {
                    bytesToRead = size;
                }
                System.arraycopy(buf, m_read, buffer, 0, bytesToRead);
                m_read += bytesToRead;
                // Whole batch consumed: allow ReadThread to fill the buffer again.
                if (m_read >= m_available) {
                    m_read = 0;
                    m_available = 0;
                }
                return bytesToRead;
            }
            return 0;
        }
    }

    /**
     * Handles an IO exception: logs, closes this layer and the socket.
     *
     * @param e the exception that ended the connection.
     */
    private void onIOException(IOException e) {
        Log.i(TAG, "BT Connection Lost " + e.toString());
        close();
        try {
            if (btSocket != null) {
                btSocket.close();
            }
        } catch (IOException ex) {
            Log.e(TAG, "", ex);
        }
        btSocket = null;
    }

    /**
     * Writes data to the output IO stream. The actual write happens outside the
     * lock so a slow socket cannot block readers.
     */
    public void writeData(byte[] buffer, int size) {
        OutputStream out = null;
        synchronized (this) {
            out = m_output;
        }
        try {
            if (null != out) {
                out.write(buffer, 0, size);
            }
        } catch (IOException ex) {
            onIOException(ex);
        }
    }

    /**
     * Closes the streams, removes listeners, notifies the upper layer, and
     * signals the worker threads to terminate.
     */
    public void close() {
        synchronized (this) {
            isCanceled = true;
            if (m_input != null) {
                try {
                    m_input.close();
                } catch (IOException e) {
                }
                m_input = null;
            }
            if (m_output != null) {
                try {
                    m_output.close();
                } catch (IOException e) {
                }
                m_output = null;
            }
        }
        tellConnectionClosed();
        removeAllListeners();
    }

    /**
     * Notification thread which checks in a loop if new data is available and
     * then notifies the upper layers.
     */
    private class NotificationThread extends Thread {
        public NotificationThread() {
            super("NotificationThread");
        }

        public void run() {
            while (!isCanceled) {
                try {
                    Thread.sleep(20);
                } catch (InterruptedException ex) {
                    return;
                }
                boolean sendNotification = false;
                // FIX: lock the enclosing BtLayer — the monitor used by readData()
                // and close(). The previous 'synchronized(this)' locked this Thread
                // object instead and therefore synchronized nothing.
                synchronized (BtLayer.this) {
                    if (m_input != null && m_available > 0 && !isNotificationSent) {
                        isNotificationSent = true;
                        sendNotification = true;
                    }
                }
                // Callback is invoked outside the lock to avoid re-entrancy deadlocks.
                if (sendNotification) {
                    tellDataReceived();
                }
            }
        }
    }

    /**
     * Read thread: fills 'buf' from the socket whenever the previous batch has
     * been fully consumed by readData().
     */
    private class ReadThread extends Thread {
        public ReadThread() {
            super("ReadThread");
        }

        public void run() {
            while (!isCanceled) {
                if (m_input != null && m_available == 0) {
                    try {
                        int n = m_input.read(buf, 0, buf.length);
                        if (n < 0) {
                            // FIX: read() returns -1 at end-of-stream. Previously the
                            // -1 was stored into m_available, leaving the layer alive
                            // but silently dead (readData() returned 0 forever and the
                            // closed connection was never reported). Treat EOF as a
                            // normal remote close instead.
                            Log.i(TAG, "BT Connection Lost end of stream");
                            close();
                            try {
                                if (btSocket != null) {
                                    btSocket.close();
                                }
                            } catch (IOException ex) {
                                Log.e(TAG, "", ex);
                            }
                            btSocket = null;
                            return;
                        }
                        synchronized (BtLayer.this) {
                            m_available = n;
                        }
                    } catch (IOException e) {
                        onIOException(e);
                    }
                } else {
                    try {
                        Thread.sleep(10);
                    } catch (InterruptedException e) {
                        return;
                    }
                }
            }
        }
    }
}
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.config;

import com.google.gson.Gson;
import com.thoughtworks.go.config.preprocessor.SkipParameterResolution;
import com.thoughtworks.go.config.validation.NameTypeValidator;
import com.thoughtworks.go.domain.ArtifactType;
import com.thoughtworks.go.domain.ConfigErrors;
import com.thoughtworks.go.domain.config.Configuration;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.plugin.access.artifact.ArtifactMetadataStore;
import com.thoughtworks.go.plugin.domain.artifact.ArtifactPluginInfo;
import org.apache.commons.lang3.StringUtils;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Configuration for a plugin-provided ("external") artifact published by a job.
 *
 * <p>An external artifact references an {@link ArtifactStore} by {@code storeId} and carries a
 * plugin-specific {@link Configuration}. Both {@code id} and {@code storeId} are declared
 * {@code allowNull = true} on the config attributes — their absence is reported via
 * {@link #validate(ValidationContext)} rather than enforced at construction time, so every
 * method here must tolerate {@code null} values for them.</p>
 */
@AttributeAwareConfigTag(value = "artifact", attribute = "type", attributeValue = "external")
public class PluggableArtifactConfig implements ArtifactTypeConfig {
    private final ConfigErrors errors = new ConfigErrors();

    @SkipParameterResolution
    @ConfigAttribute(value = "id", allowNull = true)
    protected String id;

    @ConfigAttribute(value = "storeId", allowNull = true)
    private String storeId;

    @ConfigSubtag
    private Configuration configuration = new Configuration();

    public static final String ID = "id";
    public static final String STORE_ID = "storeId";

    public PluggableArtifactConfig() {
    }

    public PluggableArtifactConfig(String id, String storeId, ConfigurationProperty... configurationProperties) {
        this.configuration.addAll(Arrays.asList(configurationProperties));
        this.id = id;
        this.storeId = storeId;
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getStoreId() {
        return storeId;
    }

    public void setStoreId(String storeId) {
        this.storeId = storeId;
    }

    @Override
    public ArtifactType getArtifactType() {
        return ArtifactType.external;
    }

    @Override
    public String getArtifactTypeValue() {
        return "Pluggable Artifact";
    }

    /**
     * Runs {@link #validate(ValidationContext)} and reports whether any errors were recorded,
     * either on this config or on the nested plugin configuration.
     */
    @Override
    public boolean validateTree(ValidationContext validationContext) {
        validate(validationContext);
        return !hasErrors();
    }

    /**
     * Encrypts secure plugin properties using the artifact store resolved from the
     * preprocessed (parameter-expanded) config. No-op when no store id is set.
     */
    public void encryptSecureProperties(CruiseConfig preprocessedCruiseConfig, PluggableArtifactConfig preprocessedPluggableArtifactConfig) {
        if (storeId != null) {
            ArtifactStore artifactStore = preprocessedCruiseConfig.getArtifactStores().find(preprocessedPluggableArtifactConfig.getStoreId());
            encryptSecureConfigurations(artifactStore);
        }
    }

    /**
     * Validates mandatory attributes, uniqueness of plugin configuration keys, the id's
     * character set, and — when inside a pipeline — that the referenced artifact store exists.
     */
    @Override
    public void validate(ValidationContext validationContext) {
        validateMandatoryAttributes();
        configuration.validateUniqueness(getArtifactTypeValue());

        if (!new NameTypeValidator().isNameValid(id)) {
            errors.add("id", NameTypeValidator.errorMessage("pluggable artifact id", id));
        }

        if (validationContext.isWithinPipelines()) {
            if (isNotBlank(storeId)) {
                final ArtifactStore artifactStore = validationContext.artifactStores().find(storeId);
                if (artifactStore == null) {
                    addError("storeId", String.format("Artifact store with id `%s` does not exist. Please correct the `storeId` attribute on pipeline `%s`.", storeId, validationContext.getPipeline().name()));
                }
            }
        }
    }

    private void validateMandatoryAttributes() {
        if (StringUtils.isBlank(this.id)) {
            errors.add("id", "\"Id\" is required for PluggableArtifact");
        }

        if (StringUtils.isBlank(this.storeId)) {
            errors.add("storeId", "\"Store id\" is required for PluggableArtifact");
        }
    }

    /**
     * @return {@code true} when the store exists and its plugin is installed
     * (i.e. metadata for the plugin id is present in the {@link ArtifactMetadataStore}).
     */
    public boolean hasValidPluginAndStore(ArtifactStore artifactStore) {
        if (artifactStore == null) {
            return false;
        }
        ArtifactPluginInfo pluginInfo = ArtifactMetadataStore.instance().getPluginInfo(artifactStore.getPluginId());
        return pluginInfo != null;
    }

    /**
     * Records duplicate-id / duplicate-configuration errors against this config and the first
     * matching existing config; appends this config to the list when no duplicate is found.
     *
     * <p>Comparisons are null-safe: {@code id} and {@code storeId} may legitimately be null
     * here (that is reported separately by {@link #validate(ValidationContext)}), so the
     * previous direct {@code equalsIgnoreCase} calls could throw {@link NullPointerException}.</p>
     */
    @Override
    public void validateUniqueness(List<ArtifactTypeConfig> existingArtifactTypeConfigList) {
        for (ArtifactTypeConfig existingArtifactTypeConfig : existingArtifactTypeConfigList) {
            if (existingArtifactTypeConfig instanceof PluggableArtifactConfig) {
                final PluggableArtifactConfig pluggableArtifactConfig = (PluggableArtifactConfig) existingArtifactTypeConfig;
                // StringUtils.equalsIgnoreCase is null-safe (null == null, null != non-null).
                if (StringUtils.equalsIgnoreCase(this.getId(), pluggableArtifactConfig.getId())) {
                    this.addError("id", String.format("Duplicate pluggable artifacts  with id `%s` defined.", getId()));
                    existingArtifactTypeConfig.addError("id", String.format("Duplicate pluggable artifacts  with id `%s` defined.", getId()));
                }
                if (StringUtils.equalsIgnoreCase(this.getStoreId(), pluggableArtifactConfig.getStoreId())) {
                    if (configuration.size() == pluggableArtifactConfig.getConfiguration().size() && this.configuration.containsAll(pluggableArtifactConfig.getConfiguration())) {
                        this.addError("id", "Duplicate pluggable artifacts configuration defined.");
                        existingArtifactTypeConfig.addError("id", "Duplicate pluggable artifacts configuration defined.");
                    }
                }
                return;
            }
        }
        existingArtifactTypeConfigList.add(this);
    }

    /** Serializes id, storeId and the (value-masked) configuration map to JSON. */
    public String toJSON() {
        final HashMap<String, Object> artifactStoreAsHashMap = new HashMap<>();
        artifactStoreAsHashMap.put("id", getId());
        artifactStoreAsHashMap.put("storeId", getStoreId());
        artifactStoreAsHashMap.put("configuration", this.getConfiguration().getConfigurationAsMap(true));
        return new Gson().toJson(artifactStoreAsHashMap);
    }

    @Override
    public ConfigErrors errors() {
        return errors;
    }

    public boolean hasErrors() {
        return !errors.isEmpty() || configuration.hasErrors();
    }

    public List<ConfigErrors> getAllErrors() {
        return ErrorCollector.getAllErrors(this);
    }

    @Override
    public void addError(String fieldName, String message) {
        errors.add(fieldName, message);
    }

    private void encryptSecureConfigurations(ArtifactStore artifactStore) {
        if (artifactStore != null && hasPluginInfo(artifactStore)) {
            // Loop variable renamed from `configuration` — it previously shadowed the field.
            for (ConfigurationProperty property : getConfiguration()) {
                // Skip values containing config params ("#{...}"); they are resolved later.
                if (!(property.getValue() != null && property.getValue().contains("#{"))) {
                    property.handleSecureValueConfiguration(isSecure(property.getConfigKeyName(), artifactStore));
                }
            }
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        PluggableArtifactConfig that = (PluggableArtifactConfig) o;

        // Objects.equals is null-safe: id/storeId may be null on an unvalidated config.
        return Objects.equals(id, that.id)
                && Objects.equals(storeId, that.storeId)
                && Objects.equals(configuration, that.configuration);
    }

    @Override
    public int hashCode() {
        // Objects.hashCode(null) == 0, keeping the contract with equals() for null fields.
        int result = Objects.hashCode(id);
        result = 31 * result + getArtifactType().hashCode();
        result = 31 * result + Objects.hashCode(storeId);
        result = 31 * result + (configuration != null ? configuration.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "PluggableArtifactConfig{" +
                "id='" + id + '\'' +
                ", storeId='" + storeId + '\'' +
                '}';
    }

    private boolean isSecure(String configKeyName, ArtifactStore artifactStore) {
        ArtifactPluginInfo pluginInfo = getPluginInfo(artifactStore);

        return pluginInfo != null
                && pluginInfo.getArtifactConfigSettings() != null
                && pluginInfo.getArtifactConfigSettings().getConfiguration(configKeyName) != null
                && pluginInfo.getArtifactConfigSettings().getConfiguration(configKeyName).isSecure();
    }

    private boolean hasPluginInfo(ArtifactStore artifactStore) {
        return getPluginInfo(artifactStore) != null;
    }

    private ArtifactPluginInfo getPluginInfo(ArtifactStore artifactStore) {
        return ArtifactMetadataStore.instance().getPluginInfo(artifactStore.getPluginId());
    }

    public void addConfigurations(List<ConfigurationProperty> configurationProperties) {
        this.getConfiguration().addAll(configurationProperties);
    }
}
package com.liferay.devcon2013.model;

import com.liferay.portal.kernel.bean.AutoEscape;
import com.liferay.portal.kernel.exception.SystemException;
import com.liferay.portal.model.BaseModel;
import com.liferay.portal.model.CacheModel;
import com.liferay.portal.model.StagedGroupedModel;
import com.liferay.portal.model.WorkflowedModel;
import com.liferay.portal.service.ServiceContext;
import com.liferay.portlet.expando.model.ExpandoBridge;

import java.io.Serializable;

import java.util.Date;

/**
 * The base model interface for the BeerDrinker service. Represents a row in the &quot;SBUT_BeerDrinker&quot; database table, with each column mapped to a property of this class.
 *
 * <p>
 * This interface and its corresponding implementation {@link com.liferay.devcon2013.model.impl.BeerDrinkerModelImpl} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link com.liferay.devcon2013.model.impl.BeerDrinkerImpl}.
 * </p>
 *
 * @author Brian Wing Shun Chan
 * @see BeerDrinker
 * @see com.liferay.devcon2013.model.impl.BeerDrinkerImpl
 * @see com.liferay.devcon2013.model.impl.BeerDrinkerModelImpl
 * @generated
 */
public interface BeerDrinkerModel extends BaseModel<BeerDrinker>,
	StagedGroupedModel, WorkflowedModel {
	/*
	 * NOTE FOR DEVELOPERS:
	 *
	 * Never modify or reference this interface directly. All methods that expect a beer drinker model instance should use the {@link BeerDrinker} interface instead.
	 */

	// ---- Primary key ----

	/**
	 * Returns the primary key of this beer drinker.
	 *
	 * @return the primary key of this beer drinker
	 */
	public long getPrimaryKey();

	/**
	 * Sets the primary key of this beer drinker.
	 *
	 * @param primaryKey the primary key of this beer drinker
	 */
	public void setPrimaryKey(long primaryKey);

	// ---- Staging / audit columns (inherited contracts from StagedGroupedModel) ----

	/**
	 * Returns the uuid of this beer drinker.
	 *
	 * @return the uuid of this beer drinker
	 */
	@AutoEscape
	@Override
	public String getUuid();

	/**
	 * Sets the uuid of this beer drinker.
	 *
	 * @param uuid the uuid of this beer drinker
	 */
	@Override
	public void setUuid(String uuid);

	/**
	 * Returns the beer drinker ID of this beer drinker.
	 *
	 * @return the beer drinker ID of this beer drinker
	 */
	public long getBeerDrinkerId();

	/**
	 * Sets the beer drinker ID of this beer drinker.
	 *
	 * @param beerDrinkerId the beer drinker ID of this beer drinker
	 */
	public void setBeerDrinkerId(long beerDrinkerId);

	/**
	 * Returns the group ID of this beer drinker.
	 *
	 * @return the group ID of this beer drinker
	 */
	@Override
	public long getGroupId();

	/**
	 * Sets the group ID of this beer drinker.
	 *
	 * @param groupId the group ID of this beer drinker
	 */
	@Override
	public void setGroupId(long groupId);

	/**
	 * Returns the company ID of this beer drinker.
	 *
	 * @return the company ID of this beer drinker
	 */
	@Override
	public long getCompanyId();

	/**
	 * Sets the company ID of this beer drinker.
	 *
	 * @param companyId the company ID of this beer drinker
	 */
	@Override
	public void setCompanyId(long companyId);

	/**
	 * Returns the user ID of this beer drinker.
	 *
	 * @return the user ID of this beer drinker
	 */
	@Override
	public long getUserId();

	/**
	 * Sets the user ID of this beer drinker.
	 *
	 * @param userId the user ID of this beer drinker
	 */
	@Override
	public void setUserId(long userId);

	/**
	 * Returns the user uuid of this beer drinker.
	 *
	 * @return the user uuid of this beer drinker
	 * @throws SystemException if a system exception occurred
	 */
	@Override
	public String getUserUuid() throws SystemException;

	/**
	 * Sets the user uuid of this beer drinker.
	 *
	 * @param userUuid the user uuid of this beer drinker
	 */
	@Override
	public void setUserUuid(String userUuid);

	/**
	 * Returns the user name of this beer drinker.
	 *
	 * @return the user name of this beer drinker
	 */
	@AutoEscape
	@Override
	public String getUserName();

	/**
	 * Sets the user name of this beer drinker.
	 *
	 * @param userName the user name of this beer drinker
	 */
	@Override
	public void setUserName(String userName);

	/**
	 * Returns the create date of this beer drinker.
	 *
	 * @return the create date of this beer drinker
	 */
	@Override
	public Date getCreateDate();

	/**
	 * Sets the create date of this beer drinker.
	 *
	 * @param createDate the create date of this beer drinker
	 */
	@Override
	public void setCreateDate(Date createDate);

	/**
	 * Returns the modified date of this beer drinker.
	 *
	 * @return the modified date of this beer drinker
	 */
	@Override
	public Date getModifiedDate();

	/**
	 * Sets the modified date of this beer drinker.
	 *
	 * @param modifiedDate the modified date of this beer drinker
	 */
	@Override
	public void setModifiedDate(Date modifiedDate);

	// ---- Workflow columns (inherited contracts from WorkflowedModel) ----

	/**
	 * Returns the status of this beer drinker.
	 *
	 * @return the status of this beer drinker
	 */
	@Override
	public int getStatus();

	/**
	 * Sets the status of this beer drinker.
	 *
	 * @param status the status of this beer drinker
	 */
	@Override
	public void setStatus(int status);

	/**
	 * Returns the status by user ID of this beer drinker.
	 *
	 * @return the status by user ID of this beer drinker
	 */
	@Override
	public long getStatusByUserId();

	/**
	 * Sets the status by user ID of this beer drinker.
	 *
	 * @param statusByUserId the status by user ID of this beer drinker
	 */
	@Override
	public void setStatusByUserId(long statusByUserId);

	/**
	 * Returns the status by user uuid of this beer drinker.
	 *
	 * @return the status by user uuid of this beer drinker
	 * @throws SystemException if a system exception occurred
	 */
	@Override
	public String getStatusByUserUuid() throws SystemException;

	/**
	 * Sets the status by user uuid of this beer drinker.
	 *
	 * @param statusByUserUuid the status by user uuid of this beer drinker
	 */
	@Override
	public void setStatusByUserUuid(String statusByUserUuid);

	/**
	 * Returns the status by user name of this beer drinker.
	 *
	 * @return the status by user name of this beer drinker
	 */
	@AutoEscape
	@Override
	public String getStatusByUserName();

	/**
	 * Sets the status by user name of this beer drinker.
	 *
	 * @param statusByUserName the status by user name of this beer drinker
	 */
	@Override
	public void setStatusByUserName(String statusByUserName);

	/**
	 * Returns the status date of this beer drinker.
	 *
	 * @return the status date of this beer drinker
	 */
	@Override
	public Date getStatusDate();

	/**
	 * Sets the status date of this beer drinker.
	 *
	 * @param statusDate the status date of this beer drinker
	 */
	@Override
	public void setStatusDate(Date statusDate);

	// ---- Entity-specific columns ----

	/**
	 * Returns the name of this beer drinker.
	 *
	 * @return the name of this beer drinker
	 */
	@AutoEscape
	public String getName();

	/**
	 * Sets the name of this beer drinker.
	 *
	 * @param name the name of this beer drinker
	 */
	public void setName(String name);

	/**
	 * Returns the alcohol level of this beer drinker.
	 *
	 * @return the alcohol level of this beer drinker
	 */
	public float getAlcoholLevel();

	/**
	 * Sets the alcohol level of this beer drinker.
	 *
	 * @param alcoholLevel the alcohol level of this beer drinker
	 */
	public void setAlcoholLevel(float alcoholLevel);

	// ---- Workflow status convenience queries ----

	/**
	 * @deprecated As of 6.1.0, replaced by {@link #isApproved()}
	 */
	@Override
	public boolean getApproved();

	/**
	 * Returns <code>true</code> if this beer drinker is approved.
	 *
	 * @return <code>true</code> if this beer drinker is approved; <code>false</code> otherwise
	 */
	@Override
	public boolean isApproved();

	/**
	 * Returns <code>true</code> if this beer drinker is denied.
	 *
	 * @return <code>true</code> if this beer drinker is denied; <code>false</code> otherwise
	 */
	@Override
	public boolean isDenied();

	/**
	 * Returns <code>true</code> if this beer drinker is a draft.
	 *
	 * @return <code>true</code> if this beer drinker is a draft; <code>false</code> otherwise
	 */
	@Override
	public boolean isDraft();

	/**
	 * Returns <code>true</code> if this beer drinker is expired.
	 *
	 * @return <code>true</code> if this beer drinker is expired; <code>false</code> otherwise
	 */
	@Override
	public boolean isExpired();

	/**
	 * Returns <code>true</code> if this beer drinker is inactive.
	 *
	 * @return <code>true</code> if this beer drinker is inactive; <code>false</code> otherwise
	 */
	@Override
	public boolean isInactive();

	/**
	 * Returns <code>true</code> if this beer drinker is incomplete.
	 *
	 * @return <code>true</code> if this beer drinker is incomplete; <code>false</code> otherwise
	 */
	@Override
	public boolean isIncomplete();

	/**
	 * Returns <code>true</code> if this beer drinker is in the Recycle Bin.
	 *
	 * @return <code>true</code> if this beer drinker is in the Recycle Bin; <code>false</code> otherwise
	 */
	@Override
	public boolean isInTrash();

	/**
	 * Returns <code>true</code> if this beer drinker is pending.
	 *
	 * @return <code>true</code> if this beer drinker is pending; <code>false</code> otherwise
	 */
	@Override
	public boolean isPending();

	/**
	 * Returns <code>true</code> if this beer drinker is scheduled.
	 *
	 * @return <code>true</code> if this beer drinker is scheduled; <code>false</code> otherwise
	 */
	@Override
	public boolean isScheduled();

	// ---- BaseModel plumbing (caching, escaping, Expando, serialization) ----

	@Override
	public boolean isNew();

	@Override
	public void setNew(boolean n);

	@Override
	public boolean isCachedModel();

	@Override
	public void setCachedModel(boolean cachedModel);

	@Override
	public boolean isEscapedModel();

	@Override
	public Serializable getPrimaryKeyObj();

	@Override
	public void setPrimaryKeyObj(Serializable primaryKeyObj);

	@Override
	public ExpandoBridge getExpandoBridge();

	@Override
	public void setExpandoBridgeAttributes(BaseModel<?> baseModel);

	@Override
	public void setExpandoBridgeAttributes(ExpandoBridge expandoBridge);

	@Override
	public void setExpandoBridgeAttributes(ServiceContext serviceContext);

	@Override
	public Object clone();

	@Override
	public int compareTo(BeerDrinker beerDrinker);

	@Override
	public int hashCode();

	@Override
	public CacheModel<BeerDrinker> toCacheModel();

	@Override
	public BeerDrinker toEscapedModel();

	@Override
	public BeerDrinker toUnescapedModel();

	@Override
	public String toString();

	@Override
	public String toXmlString();
}
package com.adjust.sdk.test;

import android.content.Context;
import android.os.SystemClock;
import android.test.ActivityInstrumentationTestCase2;

import com.adjust.sdk.ActivityHandler;
import com.adjust.sdk.ActivityPackage;
import com.adjust.sdk.AdjustConfig;
import com.adjust.sdk.AdjustFactory;
import com.adjust.sdk.AttributionHandler;

import org.apache.http.client.methods.HttpUriRequest;
import org.json.JSONException;
import org.json.JSONObject;

/**
 * Instrumentation tests for {@link AttributionHandler}.
 *
 * <p>All collaborators (logger, activity handler, http client) are replaced with mocks
 * registered through {@link AdjustFactory}; assertions are made against the mock logger's
 * recorded output via {@link AssertUtil}. Calls to {@code SystemClock.sleep} give the
 * handler's internal timer a chance to fire before asserting.</p>
 *
 * Created by pfms on 28/01/15.
 */
public class TestAttributionHandler extends ActivityInstrumentationTestCase2<UnitTestActivity> {
    private MockLogger mockLogger;
    private MockActivityHandler mockActivityHandler;
    private MockHttpClient mockHttpClient;
    private AssertUtil assertUtil;
    private UnitTestActivity activity;
    private Context context;
    private ActivityPackage attributionPackage;

    public TestAttributionHandler() {
        super(UnitTestActivity.class);
    }

    public TestAttributionHandler(Class<UnitTestActivity> activityClass) {
        super(activityClass);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mockLogger = new MockLogger();
        mockActivityHandler = new MockActivityHandler(mockLogger);
        mockHttpClient = new MockHttpClient(mockLogger);

        assertUtil = new AssertUtil(mockLogger);

        AdjustFactory.setLogger(mockLogger);
        AdjustFactory.setActivityHandler(mockActivityHandler);
        AdjustFactory.setHttpClient(mockHttpClient);

        activity = getActivity();
        context = activity.getApplicationContext();

        attributionPackage = getAttributionPackage();
    }

    /**
     * Builds a real attribution package by driving an {@link ActivityHandler} session
     * against fully mocked collaborators, sanity-checks it, and resets the mock logger.
     */
    private ActivityPackage getAttributionPackage() {
        MockAttributionHandler mockAttributionHandler = new MockAttributionHandler(mockLogger);
        MockPackageHandler mockPackageHandler = new MockPackageHandler(mockLogger);

        AdjustFactory.setAttributionHandler(mockAttributionHandler);
        AdjustFactory.setPackageHandler(mockPackageHandler);

        // create the config to start the session
        AdjustConfig config = new AdjustConfig(context, "123456789012", AdjustConfig.ENVIRONMENT_SANDBOX);

        // start activity handler with config
        ActivityHandler activityHandler = ActivityHandler.getInstance(config);

        SystemClock.sleep(3000);

        ActivityPackage attributionPackage = activityHandler.getAttributionPackage();

        TestActivityPackage attributionPackageTest = new TestActivityPackage(attributionPackage);

        attributionPackageTest.testAttributionPackage();

        mockLogger.reset();

        return attributionPackage;
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        AdjustFactory.setActivityHandler(null);
        AdjustFactory.setLogger(null);

        activity = null;
        context = null;
    }

    public void testGetAttribution() {
        // assert test name to read better in logcat
        mockLogger.Assert("TestAttributionHandler testGetAttribution");

        AttributionHandler attributionHandler = new AttributionHandler(mockActivityHandler, attributionPackage, false, true);

        // test null client
        nullClientTest(attributionHandler);

        // test client exception
        clientExceptionTest(attributionHandler);

        // test wrong json response
        wrongJsonTest(attributionHandler);

        // test empty response
        emptyJsonResponseTest(attributionHandler);

        // test server error
        serverErrorTest(attributionHandler);

        // test ok response with message
        okMessageTest(attributionHandler);
    }

    public void testCheckAttribution() {
        // assert test name to read better in logcat
        mockLogger.Assert("TestAttributionHandler testCheckAttribution");

        AttributionHandler attributionHandler = new AttributionHandler(mockActivityHandler, attributionPackage, false, true);

        String response = "Response: { \"attribution\" : " +
                "{\"tracker_token\" : \"ttValue\" , " +
                "\"tracker_name\" : \"tnValue\" , " +
                "\"network\" : \"nValue\" , " +
                "\"campaign\" : \"cpValue\" , " +
                "\"adgroup\" : \"aValue\" , " +
                "\"creative\" : \"ctValue\" , " +
                "\"click_label\" : \"clValue\" } }";

        callCheckAttributionWithGet(attributionHandler, ResponseType.ATTRIBUTION, response);

        // check attribution was called without ask_in
        assertUtil.test("ActivityHandler tryUpdateAttribution, tt:ttValue tn:tnValue net:nValue cam:cpValue adg:aValue cre:ctValue cl:clValue");

        // updated set askingAttribution to false
        assertUtil.test("ActivityHandler setAskingAttribution, false");

        // it did not update to true
        assertUtil.notInTest("ActivityHandler setAskingAttribution, true");

        // and waiting for query
        assertUtil.notInDebug("Waiting to query attribution");
    }

    public void testAskIn() {
        // assert test name to read better in logcat
        mockLogger.Assert("TestAttributionHandler testAskIn");

        AttributionHandler attributionHandler = new AttributionHandler(mockActivityHandler, attributionPackage, false, true);

        String response = "Response: { \"ask_in\" : 4000 }";

        callCheckAttributionWithGet(attributionHandler, ResponseType.ASK_IN, response);

        // change the response to avoid a cycle;
        mockHttpClient.responseType = ResponseType.MESSAGE;

        // check attribution was called with ask_in
        assertUtil.notInTest("ActivityHandler tryUpdateAttribution");

        // it did update to true
        assertUtil.test("ActivityHandler setAskingAttribution, true");

        // and waited for the query
        assertUtil.debug("Waiting to query attribution in 4000 milliseconds");

        SystemClock.sleep(2000);

        JSONObject askInJsonResponse = null;
        try {
            askInJsonResponse = new JSONObject("{ \"ask_in\" : 5000 }");
        } catch (JSONException e) {
            fail(e.getMessage());
        }

        attributionHandler.checkAttribution(askInJsonResponse);

        SystemClock.sleep(3000);

        // it did update to true
        assertUtil.test("ActivityHandler setAskingAttribution, true");

        // and waited for the query
        assertUtil.debug("Waiting to query attribution in 5000 milliseconds");

        // it was been waiting for 1000 + 2000 + 3000 = 6 seconds
        // check that the mock http client was not called because the original clock was reset
        assertUtil.notInTest("HttpClient execute");

        // check that it was finally called after 6 seconds after the second ask_in
        SystemClock.sleep(3000);

        okMessageTestLogs();

        requestTest(mockHttpClient.lastRequest);
    }

    public void testPause() {
        // assert test name to read better in logcat
        mockLogger.Assert("TestAttributionHandler testPause");

        AttributionHandler attributionHandler = new AttributionHandler(mockActivityHandler, attributionPackage, true, true);

        mockHttpClient.responseType = ResponseType.MESSAGE;

        attributionHandler.getAttribution();

        SystemClock.sleep(1000);

        // check that the activity handler is paused
        assertUtil.debug("Attribution handler is paused");

        // and it did not call the http client
        assertUtil.isNull(mockHttpClient.lastRequest);
        assertUtil.notInTest("HttpClient execute");
    }

    public void testWithoutListener() {
        // assert test name to read better in logcat
        // (fixed: previously logged "testPause" — a copy-paste slip that made logcat misleading)
        mockLogger.Assert("TestAttributionHandler testWithoutListener");

        AttributionHandler attributionHandler = new AttributionHandler(mockActivityHandler, attributionPackage, false, false);

        mockHttpClient.responseType = ResponseType.MESSAGE;

        attributionHandler.getAttribution();

        SystemClock.sleep(1000);

        // check that the activity handler is not paused
        assertUtil.notInDebug("Attribution handler is paused");

        // but it did not call the http client
        assertUtil.isNull(mockHttpClient.lastRequest);
        assertUtil.notInTest("HttpClient execute");
    }

    private void nullClientTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, null);

        // check response was not logged
        assertUtil.notInVerbose("Response");
    }

    private void clientExceptionTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, ResponseType.CLIENT_PROTOCOL_EXCEPTION);

        // check the client error
        assertUtil.error("Failed to get attribution (testResponseError)");
    }

    private void wrongJsonTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, ResponseType.WRONG_JSON);

        // check that the mock http client was called
        assertUtil.test("HttpClient execute");

        assertUtil.verbose("Response: not a json response");

        assertUtil.error("Failed to parse json response. (Value not of type java.lang.String cannot be converted to JSONObject)");
    }

    private void emptyJsonResponseTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, ResponseType.EMPTY_JSON);

        // check that the mock http client was called
        assertUtil.test("HttpClient execute");

        assertUtil.verbose("Response: { }");

        assertUtil.info("No message found");

        // check attribution was called without ask_in
        assertUtil.test("ActivityHandler tryUpdateAttribution, null");
    }

    private void serverErrorTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, ResponseType.INTERNAL_SERVER_ERROR);

        // check that the mock http client was called
        assertUtil.test("HttpClient execute");

        // the response logged
        assertUtil.verbose("Response: { \"message\": \"testResponseError\"}");

        // the message in the response
        assertUtil.error("testResponseError");

        // check attribution was called without ask_in
        assertUtil.test("ActivityHandler tryUpdateAttribution, null");

        assertUtil.test("ActivityHandler setAskingAttribution, false");
    }

    private void okMessageTest(AttributionHandler attributionHandler) {
        startGetAttributionTest(attributionHandler, ResponseType.MESSAGE);

        okMessageTestLogs();
    }

    private void okMessageTestLogs() {
        // check that the mock http client was called
        assertUtil.test("HttpClient execute");

        // the response logged
        assertUtil.verbose("Response: { \"message\" : \"response OK\"}");

        // the message in the response
        assertUtil.info("response OK");

        // check attribution was called without ask_in
        assertUtil.test("ActivityHandler tryUpdateAttribution, null");
    }

    private void callCheckAttributionWithGet(AttributionHandler attributionHandler,
                                             ResponseType responseType,
                                             String response) {
        startGetAttributionTest(attributionHandler, responseType);

        // check that the mock http client was called
        assertUtil.test("HttpClient execute");

        // the response logged
        assertUtil.verbose(response);
    }

    /** Configures the mock response, triggers getAttribution and validates the GET request. */
    private void startGetAttributionTest(AttributionHandler attributionHandler, ResponseType responseType) {
        mockHttpClient.responseType = responseType;

        attributionHandler.getAttribution();

        SystemClock.sleep(1000);

        requestTest(mockHttpClient.lastRequest);
    }

    /** Asserts scheme, host and method of the attribution GET request (no-op when null). */
    private void requestTest(HttpUriRequest request) {
        if (request == null) return;

        java.net.URI uri = request.getURI();

        assertUtil.isEqual("https", uri.getScheme());

        assertUtil.isEqual("app.adjust.com", uri.getAuthority());

        assertUtil.isEqual("GET", request.getMethod());
    }
}
/* * Copyright 2002-2006,2009 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.opensymphony.xwork2; import com.opensymphony.xwork2.config.ConfigurationException; import com.opensymphony.xwork2.config.entities.ActionConfig; import com.opensymphony.xwork2.config.entities.InterceptorMapping; import com.opensymphony.xwork2.config.entities.ResultConfig; import com.opensymphony.xwork2.inject.Container; import com.opensymphony.xwork2.inject.Inject; import com.opensymphony.xwork2.interceptor.PreResultListener; import com.opensymphony.xwork2.util.ValueStack; import com.opensymphony.xwork2.util.ValueStackFactory; import com.opensymphony.xwork2.util.logging.Logger; import com.opensymphony.xwork2.util.logging.LoggerFactory; import com.opensymphony.xwork2.util.profiling.UtilTimerStack; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * The Default ActionInvocation implementation * * @author Rainer Hermanns * @author tmjee * @version $Date: 2013-05-17 14:38:42 +0200 (Fr, 17 Mai 2013) $ $Id: DefaultActionInvocation.java 1483773 2013-05-17 12:38:42Z lukaszlenart $ * @see com.opensymphony.xwork2.DefaultActionProxy */ public class DefaultActionInvocation implements ActionInvocation { private static final long serialVersionUID = -585293628862447329L; //static { // if (ObjectFactory.getContinuationPackage() != null) { 
// continuationHandler = new ContinuationHandler(); // } //} private static final Logger LOG = LoggerFactory.getLogger(DefaultActionInvocation.class); private static final Class[] EMPTY_CLASS_ARRAY = new Class[0]; private static final Object[] EMPTY_OBJECT_ARRAY = new Object[0]; protected Object action; protected ActionProxy proxy; protected List<PreResultListener> preResultListeners; protected Map<String, Object> extraContext; protected ActionContext invocationContext; protected Iterator<InterceptorMapping> interceptors; protected ValueStack stack; protected Result result; protected Result explicitResult; protected String resultCode; protected boolean executed = false; protected boolean pushAction = true; protected ObjectFactory objectFactory; protected ActionEventListener actionEventListener; protected ValueStackFactory valueStackFactory; protected Container container; protected UnknownHandlerManager unknownHandlerManager; public DefaultActionInvocation(final Map<String, Object> extraContext, final boolean pushAction) { this.extraContext = extraContext; this.pushAction = pushAction; } @Inject public void setUnknownHandlerManager(UnknownHandlerManager unknownHandlerManager) { this.unknownHandlerManager = unknownHandlerManager; } @Inject public void setValueStackFactory(ValueStackFactory fac) { this.valueStackFactory = fac; } @Inject public void setObjectFactory(ObjectFactory fac) { this.objectFactory = fac; } @Inject public void setContainer(Container cont) { this.container = cont; } @Inject(required=false) public void setActionEventListener(ActionEventListener listener) { this.actionEventListener = listener; } public Object getAction() { return action; } public boolean isExecuted() { return executed; } public ActionContext getInvocationContext() { return invocationContext; } public ActionProxy getProxy() { return proxy; } /** * If the DefaultActionInvocation has been executed before and the Result is an instance of ActionChainResult, this method * will walk down 
the chain of ActionChainResults until it finds a non-chain result, which will be returned. If the * DefaultActionInvocation's result has not been executed before, the Result instance will be created and populated with * the result params. * * @return a Result instance * @throws Exception */ public Result getResult() throws Exception { Result returnResult = result; // If we've chained to other Actions, we need to find the last result while (returnResult instanceof ActionChainResult) { ActionProxy aProxy = ((ActionChainResult) returnResult).getProxy(); if (aProxy != null) { Result proxyResult = aProxy.getInvocation().getResult(); if ((proxyResult != null) && (aProxy.getExecuteResult())) { returnResult = proxyResult; } else { break; } } else { break; } } return returnResult; } public String getResultCode() { return resultCode; } public void setResultCode(String resultCode) { if (isExecuted()) throw new IllegalStateException("Result has already been executed."); this.resultCode = resultCode; } public ValueStack getStack() { return stack; } /** * Register a com.opensymphony.xwork2.interceptor.PreResultListener to be notified after the Action is executed and before the * Result is executed. The ActionInvocation implementation must guarantee that listeners will be called in the order * in which they are registered. Listener registration and execution does not need to be thread-safe. 
* * @param listener to register */ public void addPreResultListener(PreResultListener listener) { if (preResultListeners == null) { preResultListeners = new ArrayList<PreResultListener>(1); } preResultListeners.add(listener); } public Result createResult() throws Exception { if (explicitResult != null) { Result ret = explicitResult; explicitResult = null; return ret; } ActionConfig config = proxy.getConfig(); Map<String, ResultConfig> results = config.getResults(); ResultConfig resultConfig = null; try { resultConfig = results.get(resultCode); } catch (NullPointerException e) { if (LOG.isDebugEnabled()) { LOG.debug("Got NPE trying to read result configuration for resultCode [#0]", resultCode); } } if (resultConfig == null) { // If no result is found for the given resultCode, try to get a wildcard '*' match. resultConfig = results.get("*"); } if (resultConfig != null) { try { return objectFactory.buildResult(resultConfig, invocationContext.getContextMap()); } catch (Exception e) { if (LOG.isErrorEnabled()) { LOG.error("There was an exception while instantiating the result of type #0", e, resultConfig.getClassName()); } throw new XWorkException(e, resultConfig); } } else if (resultCode != null && !Action.NONE.equals(resultCode) && unknownHandlerManager.hasUnknownHandlers()) { return unknownHandlerManager.handleUnknownResult(invocationContext, proxy.getActionName(), proxy.getConfig(), resultCode); } return null; } /** * @throws ConfigurationException If no result can be found with the returned code */ public String invoke() throws Exception { String profileKey = "invoke: "; try { UtilTimerStack.push(profileKey); if (executed) { throw new IllegalStateException("Action has already executed"); } if (interceptors.hasNext()) { final InterceptorMapping interceptor = interceptors.next(); String interceptorMsg = "interceptor: " + interceptor.getName(); UtilTimerStack.push(interceptorMsg); try { resultCode = interceptor.getInterceptor().intercept(DefaultActionInvocation.this); 
} finally { UtilTimerStack.pop(interceptorMsg); } } else { resultCode = invokeActionOnly(); } // this is needed because the result will be executed, then control will return to the Interceptor, which will // return above and flow through again if (!executed) { if (preResultListeners != null) { for (Object preResultListener : preResultListeners) { PreResultListener listener = (PreResultListener) preResultListener; String _profileKey = "preResultListener: "; try { UtilTimerStack.push(_profileKey); listener.beforeResult(this, resultCode); } finally { UtilTimerStack.pop(_profileKey); } } } // now execute the result, if we're supposed to if (proxy.getExecuteResult()) { executeResult(); } executed = true; } return resultCode; } finally { UtilTimerStack.pop(profileKey); } } public String invokeActionOnly() throws Exception { return invokeAction(getAction(), proxy.getConfig()); } protected void createAction(Map<String, Object> contextMap) { // load action String timerKey = "actionCreate: " + proxy.getActionName(); try { UtilTimerStack.push(timerKey); action = objectFactory.buildAction(proxy.getActionName(), proxy.getNamespace(), proxy.getConfig(), contextMap); } catch (InstantiationException e) { throw new XWorkException("Unable to intantiate Action!", e, proxy.getConfig()); } catch (IllegalAccessException e) { throw new XWorkException("Illegal access to constructor, is it public?", e, proxy.getConfig()); } catch (Exception e) { String gripe; if (proxy == null) { gripe = "Whoa! No ActionProxy instance found in current ActionInvocation. This is bad ... very bad"; } else if (proxy.getConfig() == null) { gripe = "Sheesh. Where'd that ActionProxy get to? 
I can't find it in the current ActionInvocation!?"; } else if (proxy.getConfig().getClassName() == null) { gripe = "No Action defined for '" + proxy.getActionName() + "' in namespace '" + proxy.getNamespace() + "'"; } else { gripe = "Unable to instantiate Action, " + proxy.getConfig().getClassName() + ", defined for '" + proxy.getActionName() + "' in namespace '" + proxy.getNamespace() + "'"; } gripe += (((" -- " + e.getMessage()) != null) ? e.getMessage() : " [no message in exception]"); throw new XWorkException(gripe, e, proxy.getConfig()); } finally { UtilTimerStack.pop(timerKey); } if (actionEventListener != null) { action = actionEventListener.prepare(action, stack); } } protected Map<String, Object> createContextMap() { Map<String, Object> contextMap; if ((extraContext != null) && (extraContext.containsKey(ActionContext.VALUE_STACK))) { // In case the ValueStack was passed in stack = (ValueStack) extraContext.get(ActionContext.VALUE_STACK); if (stack == null) { throw new IllegalStateException("There was a null Stack set into the extra params."); } contextMap = stack.getContext(); } else { // create the value stack // this also adds the ValueStack to its context stack = valueStackFactory.createValueStack(); // create the action context contextMap = stack.getContext(); } // put extraContext in if (extraContext != null) { contextMap.putAll(extraContext); } //put this DefaultActionInvocation into the context map contextMap.put(ActionContext.ACTION_INVOCATION, this); contextMap.put(ActionContext.CONTAINER, container); return contextMap; } /** * Uses getResult to get the final Result and executes it * * @throws ConfigurationException If not result can be found with the returned code */ private void executeResult() throws Exception { result = createResult(); String timerKey = "executeResult: " + getResultCode(); try { UtilTimerStack.push(timerKey); if (result != null) { result.execute(this); } else if (resultCode != null && !Action.NONE.equals(resultCode)) { throw 
new ConfigurationException("No result defined for action " + getAction().getClass().getName() + " and result " + getResultCode(), proxy.getConfig()); } else { if (LOG.isDebugEnabled()) { LOG.debug("No result returned for action " + getAction().getClass().getName() + " at " + proxy.getConfig().getLocation()); } } } finally { UtilTimerStack.pop(timerKey); } } public void init(ActionProxy proxy) { this.proxy = proxy; Map<String, Object> contextMap = createContextMap(); // Setting this so that other classes, like object factories, can use the ActionProxy and other // contextual information to operate ActionContext actionContext = ActionContext.getContext(); if (actionContext != null) { actionContext.setActionInvocation(this); } createAction(contextMap); if (pushAction) { stack.push(action); contextMap.put("action", action); } invocationContext = new ActionContext(contextMap); invocationContext.setName(proxy.getActionName()); // get a new List so we don't get problems with the iterator if someone changes the list List<InterceptorMapping> interceptorList = new ArrayList<InterceptorMapping>(proxy.getConfig().getInterceptors()); interceptors = interceptorList.iterator(); } protected String invokeAction(Object action, ActionConfig actionConfig) throws Exception { String methodName = proxy.getMethod(); if (LOG.isDebugEnabled()) { LOG.debug("Executing action method = " + actionConfig.getMethodName()); } String timerKey = "invokeAction: " + proxy.getActionName(); try { UtilTimerStack.push(timerKey); boolean methodCalled = false; Object methodResult = null; Method method = null; try { method = getAction().getClass().getMethod(methodName, EMPTY_CLASS_ARRAY); } catch (NoSuchMethodException e) { // hmm -- OK, try doXxx instead try { String altMethodName = "do" + methodName.substring(0, 1).toUpperCase() + methodName.substring(1); method = getAction().getClass().getMethod(altMethodName, EMPTY_CLASS_ARRAY); } catch (NoSuchMethodException e1) { // well, give the unknown handler a shot 
if (unknownHandlerManager.hasUnknownHandlers()) { try { methodResult = unknownHandlerManager.handleUnknownMethod(action, methodName); methodCalled = true; } catch (NoSuchMethodException e2) { // throw the original one throw e; } } else { throw e; } } } if (!methodCalled) { methodResult = method.invoke(action, EMPTY_OBJECT_ARRAY); } return saveResult(actionConfig, methodResult); } catch (NoSuchMethodException e) { throw new IllegalArgumentException("The " + methodName + "() is not defined in action " + getAction().getClass() + ""); } catch (InvocationTargetException e) { // We try to return the source exception. Throwable t = e.getTargetException(); if (actionEventListener != null) { String result = actionEventListener.handleException(t, getStack()); if (result != null) { return result; } } if (t instanceof Exception) { throw (Exception) t; } else { throw e; } } finally { UtilTimerStack.pop(timerKey); } } /** * Save the result to be used later. * @param actionConfig current ActionConfig * @param methodResult the result of the action. * @return the result code to process. */ protected String saveResult(ActionConfig actionConfig, Object methodResult) { if (methodResult instanceof Result) { this.explicitResult = (Result) methodResult; // Wire the result automatically container.inject(explicitResult); return null; } else { return (String) methodResult; } } /** * Version ready to be serialize * * @return instance without reference to {@link Container} */ public ActionInvocation serialize() { DefaultActionInvocation that = this; that.container = null; return that; } /** * Restoring Container * * @param actionContext current {@link ActionContext} * @return instance which can be used to invoke action */ public ActionInvocation deserialize(ActionContext actionContext) { DefaultActionInvocation that = this; that.container = actionContext.getContainer(); return that; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kylin.metadata.model; import java.io.Serializable; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Queue; import java.util.Set; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.lang3.StringUtils; import org.apache.kylin.common.KylinConfig; import org.apache.kylin.common.persistence.ResourceStore; import org.apache.kylin.common.persistence.RootPersistentEntity; import org.apache.kylin.common.util.StringUtil; import org.apache.kylin.metadata.MetadataConstants; import org.apache.kylin.metadata.model.JoinsTree.Chain; import org.apache.kylin.metadata.project.ProjectInstance; import org.apache.kylin.metadata.project.ProjectManager; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonAutoDetect; import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Lists; import com.google.common.collect.Maps; 
import com.google.common.collect.Sets; @SuppressWarnings("serial") @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE) public class DataModelDesc extends RootPersistentEntity { private static final Logger logger = LoggerFactory.getLogger(DataModelDesc.class); public static enum TableKind implements Serializable { FACT, LOOKUP } public static enum RealizationCapacity implements Serializable { SMALL, MEDIUM, LARGE } private KylinConfig config; @JsonProperty("name") private String name; @JsonProperty("owner") private String owner; @JsonProperty("is_draft") private boolean isDraft; @JsonProperty("description") private String description; @JsonProperty("fact_table") private String rootFactTable; @JsonProperty("lookups") @JsonInclude(JsonInclude.Include.NON_NULL) private JoinTableDesc[] joinTables; @JsonProperty("join_tables") @JsonInclude(JsonInclude.Include.NON_NULL) private JoinTableDesc[] deprecatedLookups; // replaced by "join_tables" since KYLIN-1875 @JsonProperty("dimensions") private List<ModelDimensionDesc> dimensions; @JsonProperty("metrics") private String[] metrics; @JsonProperty("filter_condition") private String filterCondition; @JsonProperty("partition_desc") PartitionDesc partitionDesc; @JsonProperty("capacity") private RealizationCapacity capacity = RealizationCapacity.MEDIUM; // computed attributes private TableRef rootFactTableRef; private Set<TableRef> factTableRefs = Sets.newLinkedHashSet(); private Set<TableRef> lookupTableRefs = Sets.newLinkedHashSet(); private Set<TableRef> allTableRefs = Sets.newLinkedHashSet(); private Map<String, TableRef> aliasMap = Maps.newHashMap(); // alias => TableRef, a table has exactly one alias private Map<String, TableRef> tableNameMap = Maps.newHashMap(); // name => TableRef, a table maybe referenced by multiple names private JoinsTree joinsTree; /** * Error messages during resolving json metadata */ private 
List<String> errors = new ArrayList<String>(); // don't use unless you're sure, for jackson only public DataModelDesc() { } public KylinConfig getConfig() { return config; } @Override public String resourceName() { return name; } public String getName() { return name; } // for updating name from draft to ready public void setName(String name) { this.name = name; } public String getOwner() { return owner; } public void setOwner(String owner) { this.owner = owner; } public boolean isDraft() { return isDraft; } public void setDraft(boolean isDraft) { this.isDraft = isDraft; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public TableRef getRootFactTable() { return rootFactTableRef; } public String getRootFactTableName() { return rootFactTable; } public void setRootFactTableName(String rootFactTable) { this.rootFactTable = rootFactTable; } public Set<TableRef> getAllTables() { return allTableRefs; } public Set<TableRef> getFactTables() { return factTableRefs; } public Map<String, TableRef> getAliasMap() { return Collections.unmodifiableMap(aliasMap); } public Set<TableRef> getLookupTables() { return lookupTableRefs; } public JoinTableDesc[] getJoinTables() { return joinTables; } public void setJoinTables(JoinTableDesc[] joinTables) { this.joinTables = joinTables; } public JoinDesc getJoinByPKSide(TableRef table) { return joinsTree.getJoinByPKSide(table); } public JoinsTree getJoinsTree() { return joinsTree; } @Deprecated public List<TableDesc> getLookupTableDescs() { List<TableDesc> result = Lists.newArrayList(); for (TableRef table : getLookupTables()) { result.add(table.getTableDesc()); } return result; } public boolean isLookupTable(TableRef t) { if (t == null) return false; else return lookupTableRefs.contains(t); } public boolean isLookupTable(String fullTableName) { for (TableRef t : lookupTableRefs) { if (t.getTableIdentity().equals(fullTableName)) return true; } return 
false; } public boolean isFactTable(TableRef t) { if (t == null) return false; else return factTableRefs.contains(t); } //TODO: different from isFactTable(TableRef t) public boolean isFactTable(String fullTableName) { for (TableRef t : factTableRefs) { if (t.getTableIdentity().equals(fullTableName)) return true; } return false; } public boolean containsTable(TableDesc table) { for (TableRef t : allTableRefs) { if (t.getTableIdentity().equals(table.getIdentity()) && StringUtil.equals(t.getTableDesc().getProject(), table.getProject())) return true; } return false; } public String getFilterCondition() { return filterCondition; } public void setFilterCondition(String filterCondition) { this.filterCondition = filterCondition; } public PartitionDesc getPartitionDesc() { return partitionDesc; } public void setPartitionDesc(PartitionDesc partitionDesc) { this.partitionDesc = partitionDesc; } public RealizationCapacity getCapacity() { return capacity; } public void setCapacity(RealizationCapacity capacity) { this.capacity = capacity; } public TblColRef findColumn(String table, String column) throws IllegalArgumentException { TableRef tableRef = findTable(table); TblColRef result = tableRef.getColumn(column.toUpperCase(Locale.ROOT)); if (result == null) throw new IllegalArgumentException("Column not found by " + table + "." 
+ column); return result; } public TblColRef findColumn(String column) throws IllegalArgumentException { TblColRef result = null; String input = column; column = column.toUpperCase(Locale.ROOT); int cut = column.lastIndexOf('.'); if (cut > 0) { // table specified result = findColumn(column.substring(0, cut), column.substring(cut + 1)); } else { // table not specified, try each table for (TableRef tableRef : allTableRefs) { result = tableRef.getColumn(column); if (result != null) break; } } if (result == null) throw new IllegalArgumentException("Column not found by " + input); return result; } // find by unique name, that must uniquely identifies a table in the model public TableRef findTable(String table) throws IllegalArgumentException { TableRef result = tableNameMap.get(table.toUpperCase(Locale.ROOT)); if (result == null) { throw new IllegalArgumentException("Table not found by " + table); } return result; } // find by table identity, that may match multiple tables in the model public TableRef findFirstTable(String tableIdentity) throws IllegalArgumentException { if (rootFactTableRef.getTableIdentity().equals(tableIdentity)) return rootFactTableRef; for (TableRef fact : factTableRefs) { if (fact.getTableIdentity().equals(tableIdentity)) return fact; } for (TableRef lookup : lookupTableRefs) { if (lookup.getTableIdentity().equals(tableIdentity)) return lookup; } throw new IllegalArgumentException("Table not found by " + tableIdentity + " in model " + name); } /** * @param isOnlineModel will affect the exposed view of project specific tables */ public void init(KylinConfig config, Map<String, TableDesc> tables, List<DataModelDesc> otherModels, boolean isOnlineModel) { initInternal(config, tables, otherModels, isOnlineModel); } public void initInternal(KylinConfig config, Map<String, TableDesc> tables, List<DataModelDesc> otherModels, boolean isOnlineModel) { this.config = config; initJoinTablesForUpgrade(); initTableAlias(tables); initJoinColumns(); 
reorderJoins(tables); initJoinsTree(); initDimensionsAndMetrics(); initPartitionDesc(); initFilterCondition(); boolean reinit = validate(); if (reinit) { // model slightly changed by validate() and must init() again initInternal(config, tables, otherModels, isOnlineModel); } } private void initJoinTablesForUpgrade() { if (joinTables == null) { joinTables = new JoinTableDesc[0]; } if (deprecatedLookups != null) { JoinTableDesc[] copy = Arrays.copyOf(joinTables, joinTables.length + deprecatedLookups.length); System.arraycopy(deprecatedLookups, 0, copy, joinTables.length, deprecatedLookups.length); joinTables = copy; deprecatedLookups = null; } } private void initTableAlias(Map<String, TableDesc> tables) { factTableRefs.clear(); lookupTableRefs.clear(); allTableRefs.clear(); aliasMap.clear(); tableNameMap.clear(); if (StringUtils.isEmpty(rootFactTable)) { throw new IllegalStateException("root fact table should not be empty"); } rootFactTable = rootFactTable.toUpperCase(Locale.ROOT); if (tables.containsKey(rootFactTable) == false) throw new IllegalStateException("Root fact table does not exist:" + rootFactTable); TableDesc rootDesc = tables.get(rootFactTable); rootFactTableRef = new TableRef(this, rootDesc.getName(), rootDesc, false); addAlias(rootFactTableRef); factTableRefs.add(rootFactTableRef); for (JoinTableDesc join : joinTables) { join.setTable(join.getTable().toUpperCase(Locale.ROOT)); if (tables.containsKey(join.getTable()) == false) throw new IllegalStateException("Join table does not exist:" + join.getTable()); TableDesc tableDesc = tables.get(join.getTable()); String alias = join.getAlias(); if (alias == null) { alias = tableDesc.getName(); } alias = alias.toUpperCase(Locale.ROOT); join.setAlias(alias); boolean isLookup = join.getKind() == TableKind.LOOKUP; TableRef ref = new TableRef(this, alias, tableDesc, isLookup); join.setTableRef(ref); addAlias(ref); (isLookup ? 
lookupTableRefs : factTableRefs).add(ref); } tableNameMap.putAll(aliasMap); allTableRefs.addAll(factTableRefs); allTableRefs.addAll(lookupTableRefs); } private void addAlias(TableRef ref) { String alias = ref.getAlias(); if (aliasMap.containsKey(alias)) throw new IllegalStateException("Alias '" + alias + "' ref to multiple tables: " + ref.getTableIdentity() + ", " + aliasMap.get(alias).getTableIdentity()); aliasMap.put(alias, ref); TableDesc table = ref.getTableDesc(); addTableName(table.getName(), ref); addTableName(table.getIdentity(), ref); } private void addTableName(String name, TableRef ref) { if (tableNameMap.containsKey(name)) { tableNameMap.put(name, null); // conflict name } else { tableNameMap.put(name, ref); } } private void initDimensionsAndMetrics() { for (ModelDimensionDesc dim : dimensions) { dim.init(this); } for (int i = 0; i < metrics.length; i++) { metrics[i] = findColumn(metrics[i]).getIdentity(); } } private void initPartitionDesc() { if (this.partitionDesc != null) this.partitionDesc.init(this); } //Check if the filter condition is illegal. private void initFilterCondition() { if (null == this.filterCondition) { return; } int quotationType = 0; int len = this.filterCondition.length(); for (int i = 0; i < len; i++) { //If a ';' which is not within a string is found, throw exception. if (';' == this.filterCondition.charAt(i) && 0 == quotationType) { throw new IllegalStateException( "Filter Condition is Illegal. 
Please check it and make sure it's an appropriate expression for WHERE clause"); } if ('\'' == this.filterCondition.charAt(i)) { if (quotationType > 0) { if (1 == quotationType) { quotationType = 0; continue; } } else { if (0 == quotationType) { quotationType = 1; continue; } } } if ('"' == this.filterCondition.charAt(i)) { if (quotationType > 0) { if (2 == quotationType) { quotationType = 0; continue; } } else { if (0 == quotationType) { quotationType = 2; continue; } } } } } private void initJoinColumns() { for (JoinTableDesc joinTable : joinTables) { TableRef dimTable = joinTable.getTableRef(); JoinDesc join = joinTable.getJoin(); if (join == null) throw new IllegalStateException("Missing join conditions on table " + dimTable); StringUtil.toUpperCaseArray(join.getForeignKey(), join.getForeignKey()); StringUtil.toUpperCaseArray(join.getPrimaryKey(), join.getPrimaryKey()); // primary key String[] pks = join.getPrimaryKey(); TblColRef[] pkCols = new TblColRef[pks.length]; for (int i = 0; i < pks.length; i++) { TblColRef col = dimTable.getColumn(pks[i]); if (col == null) { col = findColumn(pks[i]); } if (col == null || col.getTableRef().equals(dimTable) == false) { throw new IllegalStateException("Can't find PK column " + pks[i] + " in table " + dimTable); } pks[i] = col.getIdentity(); pkCols[i] = col; } join.setPrimaryKeyColumns(pkCols); // foreign key String[] fks = join.getForeignKey(); TblColRef[] fkCols = new TblColRef[fks.length]; for (int i = 0; i < fks.length; i++) { TblColRef col = findColumn(fks[i]); if (col == null) { throw new IllegalStateException("Can't find FK column " + fks[i]); } fks[i] = col.getIdentity(); fkCols[i] = col; } join.setForeignKeyColumns(fkCols); join.sortByFK(); // Validate join in dimension TableRef fkTable = fkCols[0].getTableRef(); if (pkCols.length == 0 || fkCols.length == 0) throw new IllegalStateException("Missing join columns on table " + dimTable); if (pkCols.length != fkCols.length) { throw new IllegalStateException("Primary 
keys(" + dimTable + ")" + Arrays.toString(pks) + " are not consistent with Foreign keys(" + fkTable + ") " + Arrays.toString(fks)); } for (int i = 0; i < fkCols.length; i++) { if (!fkCols[i].getDatatype().equals(pkCols[i].getDatatype())) { logger.warn("PK " + dimTable + "." + pkCols[i].getName() + "." + pkCols[i].getDatatype() + " are not consistent with FK " + fkTable + "." + fkCols[i].getName() + "." + fkCols[i].getDatatype()); } } } } private void initJoinsTree() { List<JoinDesc> joins = new ArrayList<>(); for (JoinTableDesc joinTable : joinTables) { joins.add(joinTable.getJoin()); } joinsTree = new JoinsTree(rootFactTableRef, joins); } private void reorderJoins(Map<String, TableDesc> tables) { if (joinTables.length == 0) { return; } Map<String, List<JoinTableDesc>> fkMap = Maps.newHashMap(); for (JoinTableDesc joinTable : joinTables) { JoinDesc join = joinTable.getJoin(); String fkSideName = join.getFKSide().getAlias(); if (fkMap.containsKey(fkSideName)) { fkMap.get(fkSideName).add(joinTable); } else { List<JoinTableDesc> joinTableList = Lists.newArrayList(); joinTableList.add(joinTable); fkMap.put(fkSideName, joinTableList); } } JoinTableDesc[] orderedJoinTables = new JoinTableDesc[joinTables.length]; int orderedIndex = 0; Queue<JoinTableDesc> joinTableBuff = new ArrayDeque<JoinTableDesc>(); TableDesc rootDesc = tables.get(rootFactTable); joinTableBuff.addAll(fkMap.get(rootDesc.getName())); while (!joinTableBuff.isEmpty()) { JoinTableDesc head = joinTableBuff.poll(); orderedJoinTables[orderedIndex++] = head; String headAlias = head.getJoin().getPKSide().getAlias(); if (fkMap.containsKey(headAlias)) { joinTableBuff.addAll(fkMap.get(headAlias)); } } joinTables = orderedJoinTables; } private boolean validate() { // ensure no dup between dimensions/metrics for (ModelDimensionDesc dim : dimensions) { String table = dim.getTable(); for (String c : dim.getColumns()) { TblColRef dcol = findColumn(table, c); metrics = ArrayUtils.removeElement(metrics, 
dcol.getIdentity()); } } Set<TblColRef> mcols = new HashSet<>(); for (String m : metrics) { mcols.add(findColumn(m)); } // validate PK/FK are in dimensions boolean pkfkDimAmended = false; for (Chain chain : joinsTree.getTableChains().values()) { pkfkDimAmended = validatePkFkDim(chain.join, mcols) || pkfkDimAmended; } return pkfkDimAmended; } private boolean validatePkFkDim(JoinDesc join, Set<TblColRef> mcols) { if (join == null) return false; boolean pkfkDimAmended = false; for (TblColRef c : join.getForeignKeyColumns()) { if (!mcols.contains(c)) { pkfkDimAmended = validatePkFkDim(c) || pkfkDimAmended; } } for (TblColRef c : join.getPrimaryKeyColumns()) { if (!mcols.contains(c)) { pkfkDimAmended = validatePkFkDim(c) || pkfkDimAmended; } } return pkfkDimAmended; } private boolean validatePkFkDim(TblColRef c) { String t = c.getTableAlias(); ModelDimensionDesc dimDesc = null; for (ModelDimensionDesc dim : dimensions) { if (dim.getTable().equals(t)) { dimDesc = dim; break; } } if (dimDesc == null) { dimDesc = new ModelDimensionDesc(); dimDesc.setTable(t); dimDesc.setColumns(new String[0]); dimensions.add(dimDesc); } if (ArrayUtils.contains(dimDesc.getColumns(), c.getName()) == false) { String[] newCols = ArrayUtils.add(dimDesc.getColumns(), c.getName()); dimDesc.setColumns(newCols); return true; } return false; } public boolean isStandardPartitionedDateColumn() { if (StringUtils.isBlank(getPartitionDesc().getPartitionDateFormat())) { return false; } return true; } /** * Add error info and thrown exception out */ public void addError(String message) { addError(message, false); } /** * @param message error message * @param silent if throw exception */ public void addError(String message, boolean silent) { if (!silent) { throw new IllegalStateException(message); } else { this.errors.add(message); } } public List<String> getError() { return this.errors; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) 
return false; DataModelDesc modelDesc = (DataModelDesc) o; if (!name.equals(modelDesc.name)) return false; if (!getRootFactTable().equals(modelDesc.getRootFactTable())) return false; return true; } @Override public int hashCode() { int result = 0; result = 31 * result + name.hashCode(); result = 31 * result + getRootFactTable().hashCode(); return result; } @Override public String toString() { return "DataModelDesc [name=" + name + "]"; } public String getResourcePath() { return concatResourcePath(resourceName()); } public static String concatResourcePath(String descName) { return ResourceStore.DATA_MODEL_DESC_RESOURCE_ROOT + "/" + descName + MetadataConstants.FILE_SURFIX; } public List<ModelDimensionDesc> getDimensions() { return dimensions; } public String[] getMetrics() { return metrics; } public void setDimensions(List<ModelDimensionDesc> dimensions) { this.dimensions = dimensions; } public void setMetrics(String[] metrics) { this.metrics = metrics; } public String getProject() { return ProjectManager.getInstance(getConfig()).getProjectOfModel(this.getName()).getName(); } public ProjectInstance getProjectInstance() { return ProjectManager.getInstance(getConfig()).getProjectOfModel(this.getName()); } public static DataModelDesc getCopyOf(DataModelDesc orig) { return copy(orig, new DataModelDesc()); } public static DataModelDesc copy(DataModelDesc orig, DataModelDesc copy) { copy.config = orig.config; copy.name = orig.name; copy.isDraft = orig.isDraft; copy.owner = orig.owner; copy.description = orig.description; copy.rootFactTable = orig.rootFactTable; copy.joinTables = orig.joinTables; copy.dimensions = orig.dimensions; copy.metrics = orig.metrics; copy.filterCondition = orig.filterCondition; copy.capacity = orig.capacity; if (orig.getPartitionDesc() != null) { copy.partitionDesc = PartitionDesc.getCopyOf(orig.getPartitionDesc()); } copy.updateRandomUuid(); return copy; } }
package it.unibz.krdb.obda.reformulation.tests;

/*
 * #%L
 * ontop-quest-owlapi3
 * %%
 * Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import it.unibz.krdb.obda.exception.InvalidMappingException;
import it.unibz.krdb.obda.exception.InvalidPredicateDeclarationException;
import it.unibz.krdb.obda.io.ModelIOManager;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants;
import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLConfiguration;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLFactory;

import junit.framework.TestCase;

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

/***
 * A simple test that checks if the system is able to handle mapping variants
 * to construct the proper datalog program.
 *
 * <p>Each {@code testMapping_N} loads {@code case_N.obda} and asserts that the
 * reasoner can be created over it. Cases whose helper call passes
 * {@code failureTolerated = true} are known-broken mappings (see the inline
 * comments) and are allowed to throw without failing the test.</p>
 */
public class OBDA2DatalogTest extends TestCase {

    private static final String RESOURCE_DIR = "src/test/resources/test/mappinganalyzer/";

    private OBDADataFactory fac;
    private Connection conn;

    Logger log = LoggerFactory.getLogger(this.getClass());

    private OBDAModel obdaModel;
    private OWLOntology ontology;

    final String owlfile = "src/test/resources/test/mappinganalyzer/ontology.owl";

    /**
     * Initializes an in-memory H2 database with the test schema and loads the
     * test ontology.
     */
    @Override
    public void setUp() throws Exception {
        String url = "jdbc:h2:mem:questjunitdb";
        String username = "sa";
        String password = "";

        fac = OBDADataFactoryImpl.getInstance();

        conn = DriverManager.getConnection(url, username, password);
        Statement st = conn.createStatement();
        try {
            st.executeUpdate(readFile(RESOURCE_DIR + "create-tables.sql"));
        } finally {
            // The original leaked this Statement; close it explicitly.
            st.close();
        }
        conn.commit();

        // Loading the OWL file
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));

        obdaModel = fac.getOBDAModel();
    }

    @Override
    public void tearDown() throws Exception {
        dropTables();
        conn.close();
    }

    /** Drops the test tables created in {@link #setUp()}. */
    private void dropTables() throws SQLException, IOException {
        Statement st = conn.createStatement();
        try {
            st.executeUpdate(readFile(RESOURCE_DIR + "drop-tables.sql"));
        } finally {
            st.close();
        }
        conn.commit();
    }

    /**
     * Reads the whole file at {@code path} into a single string.
     *
     * <p>Note: line terminators are dropped (lines are concatenated), matching
     * the behavior of the original inline reading loops. The reader is closed
     * even on failure, which the original code did not do.</p>
     *
     * @param path file system path to read
     * @return file contents with newlines removed
     * @throws IOException if the file cannot be read
     */
    private static String readFile(String path) throws IOException {
        BufferedReader in = new BufferedReader(new FileReader(path));
        try {
            StringBuilder bf = new StringBuilder();
            String line = in.readLine();
            while (line != null) {
                bf.append(line);
                line = in.readLine();
            }
            return bf.toString();
        } finally {
            in.close();
        }
    }

    /**
     * Creates a Quest reasoner over the currently loaded OBDA model in virtual
     * ABox mode and requests a statement, which forces the mapping analysis
     * (datalog program construction) to run.
     */
    private void runTests() throws Exception {
        QuestPreferences p = new QuestPreferences();
        p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
        p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");

        // Creating a new instance of the reasoner
        QuestOWLFactory factory = new QuestOWLFactory();
        QuestOWLConfiguration config =
                QuestOWLConfiguration.builder().obdaModel(obdaModel).preferences(p).build();
        QuestOWL reasoner = factory.createReasoner(ontology, config);
        try {
            // Get ready for querying
            reasoner.getStatement();
        } finally {
            // Release reasoner resources; the original leaked the reasoner.
            reasoner.dispose();
        }
    }

    /**
     * Loads the given mapping file and runs the reasoner over it.
     *
     * @param obdaFile         path of the .obda mapping file to load
     * @param failureTolerated when {@code true}, exceptions from the reasoner
     *                         are swallowed (known-broken mapping); when
     *                         {@code false}, any exception fails the test
     */
    private void runCase(String obdaFile, boolean failureTolerated)
            throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        ModelIOManager ioManager = new ModelIOManager(obdaModel);
        ioManager.load(obdaFile);
        try {
            runTests();
        } catch (Exception e) {
            if (!failureTolerated) {
                fail(e.toString());
            }
            // else: expected failure for this case, ignore (matches the
            // original assertTrue(e.toString(), true) no-op).
        }
    }

    public void testMapping_1() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_1.obda", false);
    }

    public void testMapping_2() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_2.obda", false);
    }

    public void testMapping_3() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_3.obda", false);
    }

    public void testMapping_4() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_4.obda", false);
    }

    public void testMapping_5() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_5.obda", true); // FAIL
    }

    public void testMapping_6() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_6.obda", false);
    }

    public void testMapping_7() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_7.obda", false);
    }

    public void testMapping_8() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        // FAIL: we cannot handle the case in the look up table where id maps
        // to two different values.
        runCase(RESOURCE_DIR + "case_8.obda", true);
    }

    public void testMapping_9() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        // FAIL: we cannot handle the case in the look up table where id maps
        // to two different values.
        runCase(RESOURCE_DIR + "case_9.obda", true);
    }

    public void testMapping_10() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        // FAIL: we cannot handle the case in the look up table where alias
        // maps to two different values.
        runCase(RESOURCE_DIR + "case_10.obda", true);
    }

    public void testMapping_11() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_11.obda", false);
    }

    public void testMapping_12() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        // FAIL: we cannot handle the case in the look up table where name
        // maps to two different values.
        runCase(RESOURCE_DIR + "case_12.obda", true);
    }

    public void testMapping_13() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_13.obda", false);
    }

    public void testMapping_14() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_14.obda", true); // FAIL
    }

    public void testMapping_15() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_15.obda", false);
    }

    public void testMapping_16() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_16.obda", false);
    }

    public void testMapping_17() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_17.obda", true); // FAIL
    }

    public void testMapping_18() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
        runCase(RESOURCE_DIR + "case_18.obda", false);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gora.cassandra.serializers;

import java.nio.ByteBuffer;

import me.prettyprint.cassandra.serializers.BytesArraySerializer;
import me.prettyprint.cassandra.serializers.ByteBufferSerializer;
import me.prettyprint.cassandra.serializers.BooleanSerializer;
import me.prettyprint.cassandra.serializers.DoubleSerializer;
import me.prettyprint.cassandra.serializers.FloatSerializer;
import me.prettyprint.cassandra.serializers.IntegerSerializer;
import me.prettyprint.cassandra.serializers.LongSerializer;
import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.cassandra.serializers.SerializerTypeInferer;
import me.prettyprint.hector.api.Serializer;

import org.apache.avro.Schema;
import org.apache.avro.Schema.Type;
import org.apache.avro.generic.GenericArray;
import org.apache.avro.specific.SpecificFixed;
import org.apache.avro.util.Utf8;

import org.apache.gora.persistency.StatefulHashMap;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Utility class that infers the concrete Serializer needed to turn a value into
 * its binary representation.
 */
public class GoraSerializerTypeInferer {

  public static final Logger LOG = LoggerFactory.getLogger(GoraSerializerTypeInferer.class);

  /**
   * Infers a serializer from a runtime value.
   *
   * @param value the value to be serialized; {@code null} maps to a
   *              {@link ByteBufferSerializer}
   * @return a serializer for the value's runtime type; falls back to Hector's
   *         {@link SerializerTypeInferer} for unrecognized types
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Object value) {
    Serializer serializer = null;
    if (value == null) {
      serializer = ByteBufferSerializer.get();
    } else if (value instanceof Utf8) {
      serializer = Utf8Serializer.get();
    } else if (value instanceof Boolean) {
      serializer = BooleanSerializer.get();
    } else if (value instanceof ByteBuffer) {
      serializer = ByteBufferSerializer.get();
    } else if (value instanceof byte[]) {
      serializer = BytesArraySerializer.get();
    } else if (value instanceof Double) {
      serializer = DoubleSerializer.get();
    } else if (value instanceof Float) {
      serializer = FloatSerializer.get();
    } else if (value instanceof Integer) {
      serializer = IntegerSerializer.get();
    } else if (value instanceof Long) {
      serializer = LongSerializer.get();
    } else if (value instanceof String) {
      serializer = StringSerializer.get();
    } else if (value instanceof SpecificFixed) {
      serializer = SpecificFixedSerializer.get(value.getClass());
    } else if (value instanceof GenericArray) {
      // Unwrap the element schema so the array serializer is keyed by it.
      Schema schema = ((GenericArray)value).getSchema();
      if (schema.getType() == Type.ARRAY) {
        schema = schema.getElementType();
      }
      serializer = GenericArraySerializer.get(schema);
    } else if (value instanceof StatefulHashMap) {
      StatefulHashMap map = (StatefulHashMap)value;
      if (map.size() == 0) {
        // Empty map: the value schema cannot be inferred from an element.
        serializer = ByteBufferSerializer.get();
      } else {
        Object value0 = map.values().iterator().next();
        Schema schema = TypeUtils.getSchema(value0);
        serializer = StatefulHashMapSerializer.get(schema);
      }
    } else {
      serializer = SerializerTypeInferer.getSerializer(value);
    }
    return serializer;
  }

  /**
   * Infers a serializer from a Java class (boxed or primitive).
   *
   * @param valueClass class of values to be serialized
   * @return a serializer for the class; falls back to Hector's
   *         {@link SerializerTypeInferer} for unrecognized classes
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Class<?> valueClass) {
    Serializer serializer = null;
    if (valueClass.equals(Utf8.class)) {
      serializer = Utf8Serializer.get();
    } else if (valueClass.equals(Boolean.class) || valueClass.equals(boolean.class)) {
      serializer = BooleanSerializer.get();
    } else if (valueClass.equals(ByteBuffer.class)) {
      serializer = ByteBufferSerializer.get();
    } else if (valueClass.equals(Double.class) || valueClass.equals(double.class)) {
      serializer = DoubleSerializer.get();
    } else if (valueClass.equals(Float.class) || valueClass.equals(float.class)) {
      serializer = FloatSerializer.get();
    } else if (valueClass.equals(Integer.class) || valueClass.equals(int.class)) {
      serializer = IntegerSerializer.get();
    } else if (valueClass.equals(Long.class) || valueClass.equals(long.class)) {
      serializer = LongSerializer.get();
    } else if (valueClass.equals(String.class)) {
      serializer = StringSerializer.get();
    } else {
      serializer = SerializerTypeInferer.getSerializer(valueClass);
    }
    return serializer;
  }

  /**
   * Infers a serializer from an Avro schema.
   *
   * @param schema the Avro schema of the values to be serialized
   * @return a serializer for the schema's type, or {@code null} when the
   *         schema type has no mapping (e.g. RECORD)
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Schema schema) {
    Serializer serializer = null;
    Type type = schema.getType();
    if (type == Type.STRING) {
      serializer = Utf8Serializer.get();
    } else if (type == Type.BOOLEAN) {
      serializer = BooleanSerializer.get();
    } else if (type == Type.BYTES) {
      serializer = ByteBufferSerializer.get();
    } else if (type == Type.DOUBLE) {
      serializer = DoubleSerializer.get();
    } else if (type == Type.FLOAT) {
      serializer = FloatSerializer.get();
    } else if (type == Type.INT) {
      serializer = IntegerSerializer.get();
    } else if (type == Type.LONG) {
      serializer = LongSerializer.get();
    } else if (type == Type.FIXED) {
      Class clazz = TypeUtils.getClass(schema);
      serializer = SpecificFixedSerializer.get(clazz);
      // serializer = SpecificFixedSerializer.get(schema);
    } else if (type == Type.ARRAY) {
      serializer = GenericArraySerializer.get(schema.getElementType());
    } else if (type == Type.MAP) {
      serializer = StatefulHashMapSerializer.get(schema.getValueType());
    } else if (type == Type.UNION){
      serializer = ByteBufferSerializer.get();
    } else {
      serializer = null;
    }
    return serializer;
  }

  /**
   * Infers a serializer from an Avro schema type alone.
   *
   * <p>ARRAY and MAP cannot be handled here because they need an element/value
   * type; use {@link #getSerializer(Type, Type)} for those.</p>
   *
   * @param type Avro schema type
   * @return a serializer for the type, or {@code null} when unmapped
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Type type) {
    Serializer serializer = null;
    if (type == Type.STRING) {
      serializer = Utf8Serializer.get();
    } else if (type == Type.BOOLEAN) {
      serializer = BooleanSerializer.get();
    } else if (type == Type.BYTES) {
      serializer = ByteBufferSerializer.get();
    } else if (type == Type.DOUBLE) {
      serializer = DoubleSerializer.get();
    } else if (type == Type.FLOAT) {
      serializer = FloatSerializer.get();
    } else if (type == Type.INT) {
      serializer = IntegerSerializer.get();
    } else if (type == Type.LONG) {
      serializer = LongSerializer.get();
    } else if (type == Type.FIXED) {
      serializer = SpecificFixedSerializer.get();
    } else {
      serializer = null;
    }
    return serializer;
  }

  /**
   * Infers a serializer from a container type and its element type.
   *
   * <p>BUG FIX: in the original, the null-handling chain's result was
   * unconditionally overwritten by the ARRAY/MAP chain, so a {@code null}
   * {@code type} or {@code elementType} always produced {@code null} instead
   * of delegating to the single-argument overloads. The chains are now merged
   * so the fallbacks take effect as intended.</p>
   *
   * @param type        container schema type (ARRAY or MAP), or {@code null}
   *                    to infer from {@code elementType} alone
   * @param elementType element (or map value) schema type, or {@code null}
   *                    to infer from {@code type} alone
   * @return a serializer, or {@code null} if no mapping applies
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Type type, Type elementType) {
    if (type == null) {
      if (elementType == null) {
        return null;
      }
      return getSerializer(elementType);
    }
    if (elementType == null) {
      return getSerializer(type);
    }
    if (type == Type.ARRAY) {
      return GenericArraySerializer.get(elementType);
    }
    if (type == Type.MAP) {
      return StatefulHashMapSerializer.get(elementType);
    }
    return null;
  }

  /**
   * Infers a serializer for a FIXED schema type backed by a concrete class.
   *
   * <p>BUG FIX: the original assigned {@code null} when {@code type} was not
   * FIXED but then overwrote that result, effectively ignoring {@code type}
   * altogether. The type check is now honored.</p>
   *
   * @param type  Avro schema type; only {@link Type#FIXED} is supported
   * @param clazz the concrete {@link SpecificFixed} subclass
   * @return a {@link SpecificFixedSerializer} for {@code clazz}, or
   *         {@code null} when {@code type} is not FIXED or {@code clazz} is
   *         {@code null}
   */
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public static <T> Serializer<T> getSerializer(Type type, Class<T> clazz) {
    if (type != Type.FIXED || clazz == null) {
      return null;
    }
    return SpecificFixedSerializer.get(clazz);
  }
}
/*
 * Copyright (C) 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.android.libraries.cast.companionlibrary.cast.reconnection;

import static com.google.android.libraries.cast.companionlibrary.utils.LogUtils.LOGD;
import static com.google.android.libraries.cast.companionlibrary.utils.LogUtils.LOGE;

import com.google.android.libraries.cast.companionlibrary.cast.BaseCastManager;
import com.google.android.libraries.cast.companionlibrary.cast.VideoCastManager;
import com.google.android.libraries.cast.companionlibrary.cast.exceptions.NoConnectionException;
import com.google.android.libraries.cast.companionlibrary.cast.exceptions.TransientNetworkDisconnectionException;
import com.google.android.libraries.cast.companionlibrary.utils.LogUtils;
import com.google.android.libraries.cast.companionlibrary.utils.Utils;

import android.app.Service;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.NetworkInfo;
import android.net.wifi.WifiManager;
import android.os.IBinder;
import android.os.SystemClock;

import java.util.Timer;
import java.util.TimerTask;

/**
 * A service to run in the background when the playback of a media starts, to help with reconnection
 * if needed. Due to various reasons, connectivity to the cast device can be lost; for example wifi
 * radio may turn off when device goes to sleep or user may step outside of the wifi range, etc.
 * This service helps with recovering the connectivity when circumstances are right, for example
 * when user steps back within the wifi range, etc. In order to avoid ending up with a background
 * service that lingers around longer than it is needed, this implementation uses certain heuristics
 * to stop itself when needed: it stops when the persisted media end position (see
 * {@link BaseCastManager#PREFS_KEY_MEDIA_END}) is in the past, checked both on a timer scheduled
 * for that end position and on screen on/off events.
 */
public class ReconnectionService extends Service {

    private static final String TAG = LogUtils.makeLogTag(ReconnectionService.class);
    // the tolerance for considering a time value (in millis) to be zero
    private static final long EPSILON_MS = 500;
    // how long (in seconds) reconnectSessionIfPossible() keeps retrying after wifi comes back
    private static final int RECONNECTION_ATTEMPT_PERIOD_S = 15;
    private BroadcastReceiver mScreenOnOffBroadcastReceiver;
    private VideoCastManager mCastManager;
    private BroadcastReceiver mWifiBroadcastReceiver;
    // last observed wifi state; starts true so the first "connected" callback is a no-op
    private boolean mWifiConnectivity = true;
    // one-shot timer that fires when the current media is expected to end
    private Timer mEndTimer;
    private TimerTask mEndTimerTask;

    /**
     * (Re)arms the end-of-media timer on every start request. Returns
     * {@code START_STICKY} so the system restarts the service if it is killed.
     */
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        LOGD(TAG, "onStartCommand() is called");
        setUpEndTimer();
        return Service.START_STICKY;
    }

    /**
     * Attempts an immediate session reconnect if not already connected, and
     * registers two receivers: one for screen on/off (to re-check whether the
     * media window has expired) and one for wifi network state changes (to
     * trigger reconnection when connectivity returns). Both receivers are
     * unregistered in {@link #onDestroy()}.
     */
    @Override
    public void onCreate() {
        LOGD(TAG, "onCreate() is called");
        mCastManager = VideoCastManager.getInstance();
        if (!mCastManager.isConnected() && !mCastManager.isConnecting()) {
            mCastManager.reconnectSessionIfPossible();
        }

        // register a broadcast receiver to be notified when screen goes on or off
        IntentFilter screenOnOffIntentFilter = new IntentFilter(Intent.ACTION_SCREEN_ON);
        screenOnOffIntentFilter.addAction(Intent.ACTION_SCREEN_OFF);
        mScreenOnOffBroadcastReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                LOGD(TAG, "ScreenOnOffBroadcastReceiver: onReceive(): " + intent.getAction());
                // if the media should have ended by now, decide whether to stop the service
                long timeLeft = getMediaRemainingTime();
                if (timeLeft < EPSILON_MS) {
                    handleTermination();
                }
            }
        };
        registerReceiver(mScreenOnOffBroadcastReceiver, screenOnOffIntentFilter);

        // register a wifi receiver that would be notified when the network state changes
        IntentFilter networkIntentFilter = new IntentFilter();
        networkIntentFilter.addAction(WifiManager.NETWORK_STATE_CHANGED_ACTION);
        mWifiBroadcastReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                final String action = intent.getAction();
                if (action.equals(WifiManager.NETWORK_STATE_CHANGED_ACTION)) {
                    NetworkInfo info = intent.getParcelableExtra(WifiManager.EXTRA_NETWORK_INFO);
                    boolean connected = info.isConnected();
                    // SSID is only meaningful while connected
                    String networkSsid = connected ? Utils.getWifiSsid(context) : null;
                    ReconnectionService.this.onWifiConnectivityChanged(connected, networkSsid);
                }
            }
        };
        registerReceiver(mWifiBroadcastReceiver, networkIntentFilter);

        super.onCreate();
    }

    /**
     * Since framework calls this method twice when a change happens, we are guarding against that
     * by caching the state the first time and avoiding the second call if it is the same status.
     * On a disconnected-to-connected transition (and if the wifi-reconnect feature is enabled),
     * restarts cast discovery and attempts to rejoin the previous session on the new network.
     *
     * @param connected   whether wifi is now connected
     * @param networkSsid SSID of the newly joined network, or {@code null} when disconnected
     */
    public void onWifiConnectivityChanged(boolean connected, final String networkSsid) {
        LOGD(TAG, "WIFI connectivity changed to " + (connected ? "enabled" : "disabled"));
        if (connected && !mWifiConnectivity) {
            mWifiConnectivity = true;
            if (mCastManager.isFeatureEnabled(BaseCastManager.FEATURE_WIFI_RECONNECT)) {
                mCastManager.startCastDiscovery();
                mCastManager.reconnectSessionIfPossible(RECONNECTION_ATTEMPT_PERIOD_S, networkSsid);
            }
        } else {
            mWifiConnectivity = connected;
        }
    }

    /** Unregisters both receivers and cancels the end-of-media timer. */
    @Override
    public void onDestroy() {
        LOGD(TAG, "onDestroy()");
        if (mScreenOnOffBroadcastReceiver != null) {
            unregisterReceiver(mScreenOnOffBroadcastReceiver);
            mScreenOnOffBroadcastReceiver = null;
        }

        if (mWifiBroadcastReceiver != null) {
            unregisterReceiver(mWifiBroadcastReceiver);
            mWifiBroadcastReceiver = null;
        }

        clearEndTimer();
        super.onDestroy();
    }

    /** This service is start-only; binding is not supported. */
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    /**
     * Schedules a one-shot timer for the remaining media time. If no time is
     * left, the service stops itself immediately. Any previously scheduled
     * timer is cancelled first.
     */
    private void setUpEndTimer() {
        LOGD(TAG, "setUpEndTimer(): setting up a timer for the end of current media");
        long timeLeft = getMediaRemainingTime();
        if (timeLeft <= 0) {
            stopSelf();
            return;
        }
        clearEndTimer();
        mEndTimer = new Timer();
        mEndTimerTask = new TimerTask() {
            @Override
            public void run() {
                LOGD(TAG, "setUpEndTimer(): stopping ReconnectionService since reached the end of"
                        + " allotted time");
                handleTermination();
            }
        };
        mEndTimer.schedule(mEndTimerTask, timeLeft);
    }

    /** Cancels and clears the end-of-media timer and its task, if any. */
    private void clearEndTimer() {
        if (mEndTimerTask != null) {
            mEndTimerTask.cancel();
            mEndTimerTask = null;
        }

        if (mEndTimer != null) {
            mEndTimer.cancel();
            mEndTimer = null;
        }
    }

    /**
     * Returns the milliseconds until the persisted media end position, relative
     * to {@link SystemClock#elapsedRealtime()}; may be negative when the media
     * has already ended (or zero when no end position was persisted).
     */
    private long getMediaRemainingTime() {
        long endTime = mCastManager.getPreferenceAccessor().getLongFromPreference(
                BaseCastManager.PREFS_KEY_MEDIA_END, 0);
        return endTime - SystemClock.elapsedRealtime();
    }

    /**
     * Called when the allotted media time appears to be over. If disconnected,
     * clears the media session and persisted connection info and stops the
     * service. If still connected, re-reads the actual remaining media time
     * (playback may have been paused, extending it), and either stops the
     * service or re-persists the new end position and re-arms the timer.
     */
    private void handleTermination() {
        if (!mCastManager.isConnected()) {
            mCastManager.clearMediaSession();
            mCastManager.clearPersistedConnectionInfo(BaseCastManager.CLEAR_ALL);
            stopSelf();
        } else {
            // since we are connected and our timer has gone off, lets update the time remaining
            // on the media (since media may have been paused) and reset the time left
            long timeLeft = 0;
            try {
                // a live stream has no finite remaining time
                timeLeft = mCastManager.isRemoteStreamLive() ? 0
                        : mCastManager.getMediaTimeRemaining();
            } catch (TransientNetworkDisconnectionException | NoConnectionException e) {
                LOGE(TAG, "Failed to calculate the time left for media due to lack of connectivity",
                        e);
            }
            if (timeLeft < EPSILON_MS) {
                // no time left
                stopSelf();
            } else {
                // lets reset the counter
                mCastManager.getPreferenceAccessor().saveLongToPreference(
                        BaseCastManager.PREFS_KEY_MEDIA_END,
                        timeLeft + SystemClock.elapsedRealtime());
                LOGD(TAG, "handleTermination(): resetting the timer");
                setUpEndTimer();
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.types.utils; import org.apache.flink.annotation.Internal; import org.apache.flink.api.common.typeinfo.BasicArrayTypeInfo; import org.apache.flink.api.common.typeinfo.PrimitiveArrayTypeInfo; import org.apache.flink.api.common.typeinfo.TypeInformation; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.common.typeutils.CompositeType; import org.apache.flink.api.java.typeutils.MapTypeInfo; import org.apache.flink.api.java.typeutils.MultisetTypeInfo; import org.apache.flink.api.java.typeutils.ObjectArrayTypeInfo; import org.apache.flink.api.java.typeutils.RowTypeInfo; import org.apache.flink.table.api.DataTypes; import org.apache.flink.table.api.TableException; import org.apache.flink.table.catalog.DataTypeFactory; import org.apache.flink.table.data.RowData; import org.apache.flink.table.types.AtomicDataType; import org.apache.flink.table.types.CollectionDataType; import org.apache.flink.table.types.DataType; import org.apache.flink.table.types.DataTypeQueryable; import org.apache.flink.table.types.FieldsDataType; import org.apache.flink.table.types.KeyValueDataType; import org.apache.flink.table.types.logical.LegacyTypeInformationType; import 
org.apache.flink.table.types.logical.LogicalType; import org.apache.flink.table.types.logical.LogicalTypeRoot; import org.apache.flink.table.types.logical.RowType; import org.apache.flink.table.types.logical.TimestampKind; import org.apache.flink.table.types.logical.TimestampType; import org.apache.flink.table.types.logical.TypeInformationRawType; import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo; import org.apache.flink.table.typeutils.TimeIntervalTypeInfo; import org.apache.flink.types.Row; import org.apache.flink.util.Preconditions; import java.sql.Timestamp; import java.time.Instant; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.util.HashMap; import java.util.Map; import java.util.stream.IntStream; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRoot; import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isRowtimeAttribute; /** * Converter between {@link TypeInformation} and {@link DataType} that reflects the behavior before * Flink 1.9. The conversion is a 1:1 mapping that allows back-and-forth conversion. Note that * nullability constraints might get lost during the back-and-forth conversion. * * <p>This converter only exists to still support deprecated methods that take or return {@link * TypeInformation}. Some methods will still support type information in the future, however, the * future type information support will integrate nicer with the new type stack. * * <p>This converter reflects the old behavior that includes: * * <ul> * <li>Use old {@code java.sql.*} time classes for time data types. * <li>Only support millisecond precision for timestamps or day-time intervals. * <li>Let variable precision and scale for decimal types pass through the planner. * <li>Inconsistent nullability. Most types are nullable even though type information does not * support it. 
* <li>Distinction between {@link BasicArrayTypeInfo} and {@link ObjectArrayTypeInfo}. * <li>Let POJOs, case classes, and tuples pass through the planner. * </ul> * * <p>Any changes here need to be applied to the legacy planner as well. * * @deprecated Use {@link DataTypeFactory#createDataType(TypeInformation)} instead. Note that this * method will not create legacy types anymore. It fully uses the new type system available only * in the Blink planner. */ @Internal @Deprecated public final class LegacyTypeInfoDataTypeConverter { private static final Map<TypeInformation<?>, DataType> typeInfoDataTypeMap = new HashMap<>(); private static final Map<DataType, TypeInformation<?>> dataTypeTypeInfoMap = new HashMap<>(); static { addMapping(Types.STRING, DataTypes.STRING().bridgedTo(String.class)); addMapping(Types.BOOLEAN, DataTypes.BOOLEAN().bridgedTo(Boolean.class)); addMapping(Types.BYTE, DataTypes.TINYINT().bridgedTo(Byte.class)); addMapping(Types.SHORT, DataTypes.SMALLINT().bridgedTo(Short.class)); addMapping(Types.INT, DataTypes.INT().bridgedTo(Integer.class)); addMapping(Types.LONG, DataTypes.BIGINT().bridgedTo(Long.class)); addMapping(Types.FLOAT, DataTypes.FLOAT().bridgedTo(Float.class)); addMapping(Types.DOUBLE, DataTypes.DOUBLE().bridgedTo(Double.class)); addMapping(Types.BIG_DEC, createLegacyType(LogicalTypeRoot.DECIMAL, Types.BIG_DEC)); addMapping(Types.LOCAL_DATE, DataTypes.DATE().bridgedTo(LocalDate.class)); addMapping(Types.LOCAL_TIME, DataTypes.TIME(0).bridgedTo(LocalTime.class)); addMapping(Types.LOCAL_DATE_TIME, DataTypes.TIMESTAMP(3).bridgedTo(LocalDateTime.class)); addMapping( Types.INSTANT, DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).bridgedTo(Instant.class)); addMapping(Types.SQL_DATE, DataTypes.DATE().bridgedTo(java.sql.Date.class)); addMapping(Types.SQL_TIME, DataTypes.TIME(0).bridgedTo(java.sql.Time.class)); addMapping(Types.SQL_TIMESTAMP, DataTypes.TIMESTAMP(3).bridgedTo(java.sql.Timestamp.class)); addMapping( 
                TimeIntervalTypeInfo.INTERVAL_MONTHS,
                DataTypes.INTERVAL(DataTypes.MONTH()).bridgedTo(Integer.class));
        addMapping(
                TimeIntervalTypeInfo.INTERVAL_MILLIS,
                DataTypes.INTERVAL(DataTypes.SECOND(3)).bridgedTo(Long.class));
        // Primitive arrays map to ARRAY types with NOT NULL elements bridged to Java primitives,
        // so the overall conversion class stays the primitive array type (e.g. boolean[]).
        addMapping(
                PrimitiveArrayTypeInfo.BOOLEAN_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.BOOLEAN().notNull().bridgedTo(boolean.class))
                        .bridgedTo(boolean[].class));
        // byte[] is special-cased to BYTES rather than ARRAY(TINYINT).
        addMapping(
                PrimitiveArrayTypeInfo.BYTE_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.BYTES().bridgedTo(byte[].class));
        addMapping(
                PrimitiveArrayTypeInfo.SHORT_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.SMALLINT().notNull().bridgedTo(short.class))
                        .bridgedTo(short[].class));
        addMapping(
                PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.INT().notNull().bridgedTo(int.class))
                        .bridgedTo(int[].class));
        addMapping(
                PrimitiveArrayTypeInfo.LONG_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.BIGINT().notNull().bridgedTo(long.class))
                        .bridgedTo(long[].class));
        addMapping(
                PrimitiveArrayTypeInfo.FLOAT_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.FLOAT().notNull().bridgedTo(float.class))
                        .bridgedTo(float[].class));
        addMapping(
                PrimitiveArrayTypeInfo.DOUBLE_PRIMITIVE_ARRAY_TYPE_INFO,
                DataTypes.ARRAY(DataTypes.DOUBLE().notNull().bridgedTo(double.class))
                        .bridgedTo(double[].class));
    }

    /**
     * Registers a bidirectional mapping between a {@code TypeInformation} and a {@code DataType}.
     * A given type information may be registered at most once; both lookup maps stay in sync.
     */
    private static void addMapping(TypeInformation<?> typeInfo, DataType dataType) {
        Preconditions.checkArgument(!typeInfoDataTypeMap.containsKey(typeInfo));
        typeInfoDataTypeMap.put(typeInfo, dataType);
        dataTypeTypeInfoMap.put(dataType, typeInfo);
    }

    /**
     * Converts a legacy {@code TypeInformation} into a {@code DataType}. Falls back to a
     * {@code LegacyTypeInformationType} wrapper for type informations that have no direct
     * data type equivalent.
     */
    public static DataType toDataType(TypeInformation<?> typeInfo) {
        // time indicators first as their hashCode/equals is shared with those of regular timestamps
        if (typeInfo instanceof TimeIndicatorTypeInfo) {
            return convertToTimeAttributeType((TimeIndicatorTypeInfo) typeInfo);
        }

        // direct lookup in the statically registered mappings
        final DataType foundDataType = typeInfoDataTypeMap.get(typeInfo);
        if (foundDataType != null) {
            return foundDataType;
        }

        if (typeInfo instanceof RowTypeInfo) {
            return convertToRowType((RowTypeInfo) typeInfo);
        } else if (typeInfo instanceof ObjectArrayTypeInfo) {
            return convertToArrayType(
                    typeInfo.getTypeClass(), ((ObjectArrayTypeInfo) typeInfo).getComponentInfo());
        } else if (typeInfo instanceof BasicArrayTypeInfo) {
            // kept as a legacy ARRAY type so the reverse conversion can restore the exact info
            return createLegacyType(LogicalTypeRoot.ARRAY, typeInfo);
        } else if (typeInfo instanceof MultisetTypeInfo) {
            return convertToMultisetType(((MultisetTypeInfo) typeInfo).getElementTypeInfo());
        } else if (typeInfo instanceof MapTypeInfo) {
            return convertToMapType((MapTypeInfo) typeInfo);
        } else if (typeInfo instanceof CompositeType || isRowData(typeInfo)) {
            return createLegacyType(LogicalTypeRoot.STRUCTURED_TYPE, typeInfo);
        }

        // last resort: treat as an opaque RAW type
        return createLegacyType(LogicalTypeRoot.RAW, typeInfo);
    }

    /**
     * Converts a {@code DataType} back into a legacy {@code TypeInformation}.
     *
     * @throws TableException if no reverse conversion exists for the given data type
     */
    public static TypeInformation<?> toLegacyTypeInfo(DataType dataType) {
        // time indicators first as their hashCode/equals is shared with those of regular timestamps
        if (canConvertToTimeAttributeTypeInfo(dataType)) {
            return convertToTimeAttributeTypeInfo((TimestampType) dataType.getLogicalType());
        }

        // check in the map but relax the nullability constraint as every not null data type can be
        // stored in the corresponding nullable type information
        final TypeInformation<?> foundTypeInfo = dataTypeTypeInfoMap.get(dataType.nullable());
        if (foundTypeInfo != null) {
            return foundTypeInfo;
        }

        // we are relaxing the constraint for DECIMAL, CHAR, VARCHAR, TIMESTAMP_WITHOUT_TIME_ZONE to
        // support value literals in legacy planner
        LogicalType logicalType = dataType.getLogicalType();
        if (hasRoot(logicalType, LogicalTypeRoot.DECIMAL)) {
            return Types.BIG_DEC;
        } else if (hasRoot(logicalType, LogicalTypeRoot.CHAR)) {
            return Types.STRING;
        } else if (hasRoot(logicalType, LogicalTypeRoot.VARCHAR)) {
            return Types.STRING;
        }
        // relax the precision constraint as Timestamp can store the highest precision
        else if (hasRoot(logicalType, LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)
                && dataType.getConversionClass() == Timestamp.class) {
            return Types.SQL_TIMESTAMP;
        }
        // relax the precision constraint as LocalDateTime can store the highest precision
        else if (hasRoot(logicalType, LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)
                && dataType.getConversionClass() == LocalDateTime.class) {
            return Types.LOCAL_DATE_TIME;
        }
        // relax the precision constraint as LocalTime can store the highest precision
        else if (hasRoot(logicalType, LogicalTypeRoot.TIME_WITHOUT_TIME_ZONE)
                && dataType.getConversionClass() == LocalTime.class) {
            return Types.LOCAL_TIME;
        } else if (canConvertToLegacyTypeInfo(dataType)) {
            return convertToLegacyTypeInfo(dataType);
        } else if (canConvertToRowTypeInfo(dataType)) {
            return convertToRowTypeInfo((FieldsDataType) dataType);
        }
        // this could also match for basic array type info but this is covered by legacy type info
        else if (canConvertToObjectArrayTypeInfo(dataType)) {
            return convertToObjectArrayTypeInfo((CollectionDataType) dataType);
        } else if (canConvertToMultisetTypeInfo(dataType)) {
            return convertToMultisetTypeInfo((CollectionDataType) dataType);
        } else if (canConvertToMapTypeInfo(dataType)) {
            return convertToMapTypeInfo((KeyValueDataType) dataType);
        }
        // makes the raw type accessible in the legacy planner
        else if (canConvertToRawTypeInfo(dataType)) {
            return convertToRawTypeInfo(dataType);
        }

        throw new TableException(
                String.format(
                        "Unsupported conversion from data type '%s' (conversion class: %s) to type information. Only data types "
                                + "that originated from type information fully support a reverse conversion.",
                        dataType, dataType.getConversionClass().getName()));
    }

    /** Wraps a type information that has no direct data type equivalent into a legacy type. */
    private static DataType createLegacyType(
            LogicalTypeRoot typeRoot, TypeInformation<?> typeInfo) {
        return new AtomicDataType(new LegacyTypeInformationType<>(typeRoot, typeInfo))
                .bridgedTo(typeInfo.getTypeClass());
    }

    /** Converts a time indicator type info into a ROWTIME/PROCTIME TIMESTAMP(3) data type. */
    private static DataType convertToTimeAttributeType(
            TimeIndicatorTypeInfo timeIndicatorTypeInfo) {
        final TimestampKind kind;
        if (timeIndicatorTypeInfo.isEventTime()) {
            kind = TimestampKind.ROWTIME;
        } else {
            kind = TimestampKind.PROCTIME;
        }
        return new AtomicDataType(new TimestampType(true, kind, 3))
                .bridgedTo(java.sql.Timestamp.class);
    }

    /** Whether the data type is a time-attribute TIMESTAMP that maps to a time indicator info. */
    private static boolean canConvertToTimeAttributeTypeInfo(DataType dataType) {
        return hasRoot(dataType.getLogicalType(), LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE)
                && dataTypeTypeInfoMap.containsKey(dataType.nullable())
                && // checks precision and conversion and ignore nullable
                ((TimestampType) dataType.getLogicalType()).getKind() != TimestampKind.REGULAR;
    }

    private static TypeInformation<?> convertToTimeAttributeTypeInfo(TimestampType timestampType) {
        if (isRowtimeAttribute(timestampType)) {
            return TimeIndicatorTypeInfo.ROWTIME_INDICATOR;
        } else {
            return TimeIndicatorTypeInfo.PROCTIME_INDICATOR;
        }
    }

    /** Recursively converts a RowTypeInfo into a ROW data type bridged to {@code Row}. */
    private static DataType convertToRowType(RowTypeInfo rowTypeInfo) {
        final String[] fieldNames = rowTypeInfo.getFieldNames();
        final DataTypes.Field[] fields =
                IntStream.range(0, rowTypeInfo.getArity())
                        .mapToObj(
                                i -> {
                                    DataType fieldType = toDataType(rowTypeInfo.getTypeAt(i));
                                    return DataTypes.FIELD(fieldNames[i], fieldType);
                                })
                        .toArray(DataTypes.Field[]::new);
        return DataTypes.ROW(fields).bridgedTo(Row.class);
    }

    // Only ROW types without field descriptions can be represented as a RowTypeInfo.
    private static boolean canConvertToRowTypeInfo(DataType dataType) {
        return hasRoot(dataType.getLogicalType(), LogicalTypeRoot.ROW)
                && dataType.getConversionClass().equals(Row.class)
                && ((RowType) dataType.getLogicalType())
                        .getFields().stream().noneMatch(f -> f.getDescription().isPresent());
    }

    private static TypeInformation<?> convertToRowTypeInfo(FieldsDataType fieldsDataType) {
        final RowType rowType = (RowType) fieldsDataType.getLogicalType();

        final String[] fieldNames =
                rowType.getFields().stream().map(RowType.RowField::getName).toArray(String[]::new);

        final TypeInformation<?>[] fieldTypes =
                fieldsDataType.getChildren().stream()
                        .map(LegacyTypeInfoDataTypeConverter::toLegacyTypeInfo)
                        .toArray(TypeInformation[]::new);

        return Types.ROW_NAMED(fieldNames, fieldTypes);
    }

    private static DataType convertToArrayType(
            Class<?> arrayClass, TypeInformation<?> elementTypeInfo) {
        return DataTypes.ARRAY(toDataType(elementTypeInfo)).bridgedTo(arrayClass);
    }

    private static boolean canConvertToObjectArrayTypeInfo(DataType dataType) {
        return hasRoot(dataType.getLogicalType(), LogicalTypeRoot.ARRAY)
                && dataType.getConversionClass().isArray();
    }

    private static TypeInformation<?> convertToObjectArrayTypeInfo(
            CollectionDataType collectionDataType) {
        // Types.OBJECT_ARRAY would return a basic type info for strings
        return ObjectArrayTypeInfo.getInfoFor(
                toLegacyTypeInfo(collectionDataType.getElementDataType()));
    }

    private static DataType convertToMultisetType(TypeInformation elementTypeInfo) {
        return DataTypes.MULTISET(toDataType(elementTypeInfo)).bridgedTo(Map.class);
    }

    private static boolean canConvertToMultisetTypeInfo(DataType dataType) {
        return hasRoot(dataType.getLogicalType(), LogicalTypeRoot.MULTISET)
                && dataType.getConversionClass() == Map.class;
    }

    private static TypeInformation<?> convertToMultisetTypeInfo(
            CollectionDataType collectionDataType) {
        return new MultisetTypeInfo<>(toLegacyTypeInfo(collectionDataType.getElementDataType()));
    }

    private static DataType convertToMapType(MapTypeInfo typeInfo) {
        return DataTypes.MAP(
                        toDataType(typeInfo.getKeyTypeInfo()), toDataType(typeInfo.getValueTypeInfo()))
                .bridgedTo(Map.class);
    }

    private static boolean canConvertToMapTypeInfo(DataType dataType) {
        return hasRoot(dataType.getLogicalType(), LogicalTypeRoot.MAP)
                && dataType.getConversionClass() == Map.class;
    }

    private static TypeInformation<?> convertToMapTypeInfo(KeyValueDataType dataType) {
        return Types.MAP(
                toLegacyTypeInfo(dataType.getKeyDataType()),
                toLegacyTypeInfo(dataType.getValueDataType()));
    }

    private static boolean canConvertToLegacyTypeInfo(DataType dataType) {
        return dataType.getLogicalType() instanceof LegacyTypeInformationType;
    }

    private static TypeInformation<?> convertToLegacyTypeInfo(DataType dataType) {
        return ((LegacyTypeInformationType) dataType.getLogicalType()).getTypeInformation();
    }

    // RAW types are reversible only when the conversion class still matches the wrapped info.
    private static boolean canConvertToRawTypeInfo(DataType dataType) {
        return dataType.getLogicalType() instanceof TypeInformationRawType
                && dataType.getConversionClass()
                        .equals(
                                ((TypeInformationRawType) dataType.getLogicalType())
                                        .getTypeInformation()
                                        .getTypeClass());
    }

    private static TypeInformation<?> convertToRawTypeInfo(DataType dataType) {
        return ((TypeInformationRawType) dataType.getLogicalType()).getTypeInformation();
    }

    /**
     * Temporary solution to enable tests with type information and internal data structures until
     * we drop all legacy types.
     */
    private static boolean isRowData(TypeInformation<?> typeInfo) {
        if (!(typeInfo instanceof DataTypeQueryable)) {
            return false;
        }
        final DataType dataType = ((DataTypeQueryable) typeInfo).getDataType();
        return dataType.getConversionClass() == RowData.class;
    }

    private LegacyTypeInfoDataTypeConverter() {
        // no instantiation
    }
}
/*
 * Copyright 2017 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.stunner.core.definition.adapter.binding;

import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.kie.workbench.common.stunner.core.definition.adapter.DefinitionId;
import org.kie.workbench.common.stunner.core.definition.property.PropertyMetaTypes;
import org.kie.workbench.common.stunner.core.factory.graph.ElementFactory;
import org.kie.workbench.common.stunner.core.util.DefinitionUtils;

/**
 * Base class for bindable definition adapters. All binding metadata (field names,
 * base types, meta-property classes, graph factories) is injected once via
 * {@link #setBindings} and afterwards used to resolve definition information
 * from plain POJO definitions.
 */
public abstract class AbstractBindableDefinitionAdapter<T> implements BindableDefinitionAdapter<T> {

    protected DefinitionUtils definitionUtils;
    // Binding maps, keyed by the definition's POJO class (populated by setBindings).
    protected Map<PropertyMetaTypes, Class> metaPropertyTypeClasses;
    protected Map<Class, Class> baseTypes;
    protected Map<Class, Set<String>> propertySetsFieldNames;
    protected Map<Class, Set<String>> propertiesFieldNames;
    protected Map<Class, Class> propertyGraphFactoryFieldNames;
    protected Map<Class, String> propertyIdFieldNames;
    protected Map<Class, String> propertyLabelsFieldNames;
    protected Map<Class, String> propertyTitleFieldNames;
    protected Map<Class, String> propertyCategoryFieldNames;
    protected Map<Class, String> propertyDescriptionFieldNames;
    protected Map<Class, String> propertyNameFields;

    public AbstractBindableDefinitionAdapter(final DefinitionUtils definitionUtils) {
        this.definitionUtils = definitionUtils;
    }

    /** Returns the properties bound directly on the given definition pojo. */
    protected abstract Set<?> getBindProperties(final T pojo);

    @Override
    public void setBindings(final Map<PropertyMetaTypes, Class> metaPropertyTypeClasses,
                            final Map<Class, Class> baseTypes,
                            final Map<Class, Set<String>> propertySetsFieldNames,
                            final Map<Class, Set<String>> propertiesFieldNames,
                            final Map<Class, Class> propertyGraphFactoryFieldNames,
                            final Map<Class, String> propertyIdFieldNames,
                            final Map<Class, String> propertyLabelsFieldNames,
                            final Map<Class, String> propertyTitleFieldNames,
                            final Map<Class, String> propertyCategoryFieldNames,
                            final Map<Class, String> propertyDescriptionFieldNames,
                            final Map<Class, String> propertyNameFields) {
        this.metaPropertyTypeClasses = metaPropertyTypeClasses;
        this.baseTypes = baseTypes;
        this.propertySetsFieldNames = propertySetsFieldNames;
        this.propertiesFieldNames = propertiesFieldNames;
        this.propertyGraphFactoryFieldNames = propertyGraphFactoryFieldNames;
        this.propertyIdFieldNames = propertyIdFieldNames;
        this.propertyLabelsFieldNames = propertyLabelsFieldNames;
        this.propertyTitleFieldNames = propertyTitleFieldNames;
        this.propertyCategoryFieldNames = propertyCategoryFieldNames;
        this.propertyDescriptionFieldNames = propertyDescriptionFieldNames;
        this.propertyNameFields = propertyNameFields;
    }

    /** Reads the value of the named String field from the given pojo. */
    protected abstract String getStringFieldValue(T pojo,
                                                  String fieldName);

    /**
     * Builds the definition identifier for the pojo. If an id field is bound for the
     * pojo's class, a dynamic id is composed from the static definition id plus the
     * field value; otherwise the static definition id is used as-is.
     */
    @Override
    public DefinitionId getId(final T pojo) {
        final String fieldId = getIdField(pojo);
        final String definitionId = getDefinitionId(pojo.getClass());
        if (null != fieldId) {
            final String id = BindableAdapterUtils.getDynamicDefinitionId(definitionId,
                                                                          getStringFieldValue(pojo, fieldId));
            return DefinitionId.build(id, definitionId.length());
        }
        return DefinitionId.build(definitionId);
    }

    /** Returns the definition id of the bound base type, or {@code null} if none. */
    @Override
    @SuppressWarnings("unchecked")
    public String getBaseType(final Class<?> type) {
        final Class<?> baseType = baseTypes.get(type);
        if (null != baseType) {
            return getDefinitionId(baseType);
        }
        return null;
    }

    /**
     * Returns the definition ids of all types whose base type matches the given id,
     * or {@code null} if none match (callers rely on the null return for "no types").
     */
    @Override
    public String[] getTypes(final String baseType) {
        List<String> result = new LinkedList<>();
        for (Map.Entry<Class, Class> entry : baseTypes.entrySet()) {
            final Class type = entry.getKey();
            final Class _baseType = entry.getValue();
            final String _id = getDefinitionId(_baseType);
            if (baseType.equals(_id)) {
                result.add(getDefinitionId(type));
            }
        }
        if (!result.isEmpty()) {
            // Zero-length array is the JDK-recommended toArray idiom.
            return result.toArray(new String[0]);
        }
        return null;
    }

    /**
     * Finds the property instance of the class bound to the given meta-type, looking
     * across all of the pojo's properties. Returns {@code null} when no binding or no
     * matching property exists.
     */
    @Override
    public Object getMetaProperty(final PropertyMetaTypes metaPropertyType,
                                  final T pojo) {
        final Class pClass = metaPropertyTypeClasses.get(metaPropertyType);
        if (null != pClass) {
            final Set<?> properties = getProperties(pojo);
            if (null != properties) {
                return properties.stream()
                        .filter(property -> pClass.equals(property.getClass()))
                        .findFirst()
                        .orElse(null);
            }
        }
        return null;
    }

    /** Aggregates the pojo's property-set properties and its directly bound properties. */
    public Set<?> getProperties(final T pojo) {
        final Set<Object> result = new HashSet<>();
        // Obtain all properties from property sets.
        final Set<?> propertySetProperties = definitionUtils.getPropertiesFromPropertySets(pojo);
        if (null != propertySetProperties) {
            result.addAll(propertySetProperties);
        }
        final Set<?> bindProperties = getBindProperties(pojo);
        if (null != bindProperties && !bindProperties.isEmpty()) {
            result.addAll(bindProperties);
        }
        return result;
    }

    @Override
    public Class<? extends ElementFactory> getGraphFactoryType(final T pojo) {
        return getGraphFactory(pojo.getClass());
    }

    @Override
    @SuppressWarnings("unchecked")
    public Class<? extends ElementFactory> getGraphFactory(final Class<?> type) {
        return getPropertyGraphFactoryFieldNames().get(type);
    }

    /** A type is accepted when it is a bound definition type or a registered base type. */
    public boolean accepts(final Class<?> type) {
        final boolean hasType = getPropertyCategoryFieldNames().containsKey(type);
        // If no types found, check if it's a super type.
        return hasType || baseTypes.containsValue(type);
    }

    @Override
    public boolean isPojoModel() {
        return true;
    }

    @Override
    public int getPriority() {
        return 0;
    }

    protected Map<Class, Set<String>> getPropertySetsFieldNames() {
        return propertySetsFieldNames;
    }

    protected Map<Class, Set<String>> getPropertiesFieldNames() {
        return propertiesFieldNames;
    }

    protected Map<Class, Class> getPropertyGraphFactoryFieldNames() {
        return propertyGraphFactoryFieldNames;
    }

    protected Map<Class, String> getPropertyIdFieldNames() {
        return propertyIdFieldNames;
    }

    protected Map<Class, String> getPropertyLabelsFieldNames() {
        return propertyLabelsFieldNames;
    }

    protected Map<Class, String> getPropertyTitleFieldNames() {
        return propertyTitleFieldNames;
    }

    protected Map<Class, String> getPropertyCategoryFieldNames() {
        return propertyCategoryFieldNames;
    }

    protected Map<Class, String> getPropertyDescriptionFieldNames() {
        return propertyDescriptionFieldNames;
    }

    protected String getDefinitionId(final Class<?> type) {
        return BindableAdapterUtils.getDefinitionId(type);
    }

    private String getIdField(final T pojo) {
        return getPropertyIdFieldNames().get(pojo.getClass());
    }
}
/*
 * Copyright (c) 2015,robinjim(robinjim@126.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.robin.core.base.dao;

import java.io.Serializable;
import java.util.List;
import java.util.Map;

import org.springframework.jdbc.core.RowMapper;

import com.robin.core.base.exception.DAOException;
import com.robin.core.base.model.BaseObject;
import com.robin.core.query.util.PageQuery;
import com.robin.core.query.util.QueryString;

/**
 * Generic DAO contract for {@link BaseObject} entities with a serializable primary key.
 * All operations translate persistence failures into {@link DAOException}.
 * NOTE(review): the interface name contains the typo "Genric" (should be "Generic");
 * renaming would break all implementors/callers, so it is left as-is.
 *
 * @param <T>  the entity type managed by this DAO
 * @param <ID> the primary-key type
 */
public interface BaseGenricDao<T extends BaseObject,ID extends Serializable> {
	/**
	 * Fetches the entity with the given primary key.
	 *
	 * @param id primary key
	 * @return the entity
	 * @throws DAOException on persistence failure
	 */
	T get(ID id) throws DAOException;

	/**
	 * Loads the entity with the given primary key (lazy-load variant of {@link #get}).
	 *
	 * @param id primary key
	 * @throws DAOException on persistence failure
	 */
	T load(ID id) throws DAOException;

	/**
	 * Persists a new entity.
	 *
	 * @param obj entity to insert
	 * @throws DAOException on persistence failure
	 */
	void save(T obj) throws DAOException;

	/**
	 * Updates an existing entity.
	 *
	 * @param obj entity to update
	 * @throws DAOException on persistence failure
	 */
	void update(T obj) throws DAOException;

	/**
	 * Counts all records.
	 *
	 * @return total count of records
	 * @throws DAOException on persistence failure
	 */
	long count() throws DAOException;

	/**
	 * Counts records matching a single column value.
	 *
	 * @param fieldName  column/property name
	 * @param fieldValue value to match
	 * @return count of matching records
	 * @throws DAOException on persistence failure
	 */
	long countByField(String fieldName, Object fieldValue) throws DAOException;

	/**
	 * Removes the record with the given primary key.
	 *
	 * @param id primary key
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 */
	int remove(ID id) throws DAOException;

	/**
	 * Removes all records.
	 *
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 */
	int removeAll() throws DAOException;

	/**
	 * Removes all records whose primary keys are in the given array.
	 *
	 * @param ids primary-key array
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 */
	int removeAll(Serializable[] ids) throws DAOException;

	/**
	 * Removes records matching a single column value.
	 *
	 * @param fieldName  column/property name
	 * @param fieldValue value to match
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 */
	int removeByField(String fieldName, Object fieldValue) throws DAOException;

	/**
	 * Finds all records.
	 *
	 * @return all entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findAll() throws DAOException;

	/**
	 * Finds records matching a single column value.
	 *
	 * @param fieldName  column/property name
	 * @param fieldValue value to match
	 * @return matching entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByField(String fieldName, Object fieldValue) throws DAOException;

	/**
	 * Runs a query defined in the query configuration file; results are written back
	 * into the supplied {@link PageQuery}.
	 *
	 * @param queryString page query holding the select id and receiving the results
	 * @throws DAOException on persistence failure
	 */
	void queryBySelectId(PageQuery queryString) throws DAOException;

	/**
	 * Inserts the entity if new, otherwise updates it.
	 *
	 * @param obj entity to save or update
	 * @throws DAOException on persistence failure
	 */
	void saveOrUpdate(final Object obj) throws DAOException;

	/**
	 * Executes a query expected to return a single integer result.
	 * NOTE(review): raw SQL parameter — callers must not build this string from
	 * untrusted input (SQL injection risk).
	 *
	 * @param sql query to execute
	 * @return the integer result
	 * @throws DAOException on persistence failure
	 */
	int queryForInt(String sql) throws DAOException;

	/**
	 * @return the mapped table name for the entity type
	 */
	String getTableName();

	/**
	 * Executes a Hibernate HQL query.
	 *
	 * @param hql HQL statement
	 * @return matching entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByHql(String hql) throws DAOException;

	/**
	 * Executes a native SQL query.
	 *
	 * @param sql query to execute
	 * @return one map per row, keyed by column name
	 * @throws DAOException on persistence failure
	 */
	List<Map<String,Object>> queryBySql(String sql) throws DAOException;

	/**
	 * Executes a native SQL query mapping each row with the supplied {@link RowMapper}.
	 *
	 * @param sql       query to execute
	 * @param rowMapper row mapper applied to each result row
	 * @return mapped results
	 * @throws DAOException on persistence failure
	 */
	List<?> queryByRowWapper(String sql,RowMapper<?> rowMapper) throws DAOException;

	/**
	 * Finds records matching a single column value, ordered by one column.
	 *
	 * @param fieldName  column/property to match
	 * @param fieldValue value to match
	 * @param orderName  column/property to order by
	 * @param ascending  {@code true} for ascending order
	 * @return matching entities in the requested order
	 * @throws DAOException on persistence failure
	 */
	List<T> findByField(String fieldName, Object fieldValue,String orderName,boolean ascending) throws DAOException;

	/**
	 * Finds records matching all given column/value pairs (AND semantics).
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @return matching entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFields(String[] fieldName, Object[] fieldValue) throws DAOException;

	/**
	 * Finds records matching all given column/value pairs, ordered by one column.
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @param orderName  column/property to order by
	 * @param ascending  {@code true} for ascending order
	 * @return matching entities in the requested order
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFields(String[] fieldName, Object[] fieldValue,String orderName,boolean ascending) throws DAOException;

	/**
	 * Finds records matching all given column/value pairs, ordered by several columns.
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @param orderName  columns/properties to order by
	 * @param ascending  per-column ascending flags, positionally matching {@code orderName}
	 * @return matching entities in the requested order
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFields(String[] fieldName, Object[] fieldValue,String[] orderName,boolean[] ascending) throws DAOException;

	/**
	 * Executes a raw SQL update statement.
	 *
	 * @param sql update statement
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 * @deprecated raw SQL updates bypass entity mapping; prefer the typed operations
	 */
	@Deprecated
	int executeSqlUpdate(final String sql) throws DAOException;

	/**
	 * Executes a batch update.
	 *
	 * @param sql               parameterized statement
	 * @param resultList        one map of column name to value per batch row
	 * @param columnTypeMapList column type metadata for the batch
	 * @throws DAOException on persistence failure
	 */
	void batchUpdate(String sql,List<Map<String,String>> resultList,List<Map<String,String>> columnTypeMapList) throws DAOException;

	/**
	 * Executes an HQL query returning one page of results.
	 *
	 * @param hql      HQL statement
	 * @param startpox start position (offset) of the page
	 * @param pageSize maximum number of rows to return
	 * @return the requested page of entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByHqlPage(final String hql, final int startpox,final int pageSize) throws DAOException;

	/**
	 * Finds one page of records matching a single column value.
	 *
	 * @param fieldName  column/property to match
	 * @param fieldValue value to match
	 * @param startpos   start position (offset) of the page
	 * @param pageSize   maximum number of rows to return
	 * @return the requested page of entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFieldPage(final String fieldName, final Object fieldValue,final int startpos,final int pageSize) throws DAOException ;

	/**
	 * Finds one page of records matching a single column value, ordered by one column.
	 *
	 * @param fieldName  column/property to match
	 * @param fieldValue value to match
	 * @param orderName  column/property to order by
	 * @param ascending  {@code true} for ascending order
	 * @param startpos   start position (offset) of the page
	 * @param pageSize   maximum number of rows to return
	 * @return the requested page of entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFieldPage(final String fieldName,final Object fieldValue, String orderName, boolean ascending,final int startpos,final int pageSize) throws DAOException ;

	/**
	 * Finds one page of records matching all given column/value pairs (AND semantics).
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @param startpos   start position (offset) of the page
	 * @param pageSize   maximum number of rows to return
	 * @return the requested page of entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFieldsPage(final String[] fieldName,final Object[] fieldValue,final int startpos,final int pageSize) throws DAOException ;

	/**
	 * Finds one page of records matching all given column/value pairs, ordered by one column.
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @param orderName  column/property to order by
	 * @param ascending  {@code true} for ascending order
	 * @param startpos   start position (offset) of the page
	 * @param pageSize   maximum number of rows to return
	 * @return the requested page of entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByFieldsPage(final String[] fieldName,final Object[] fieldValue, String orderName, boolean ascending,final int startpos,final int pageSize) throws DAOException ;

	/**
	 * Executes a query expected to return a single value of the given type.
	 *
	 * @param clazz  expected result type
	 * @param sql    parameterized query
	 * @param values positional query parameters
	 * @return the single result
	 * @throws DAOException on persistence failure
	 */
	Object queryBySingle(Class<?> clazz,String sql,Object[] values) throws DAOException;

	/**
	 * Executes a parameterized native SQL query.
	 *
	 * @param sql  parameterized query
	 * @param args positional query parameters
	 * @return one map per row, keyed by column name
	 * @throws DAOException on persistence failure
	 */
	List<Map<String,Object>> queryBySql(String sql,Object[] args) throws DAOException;

	/**
	 * Executes an HQL query with named parameters.
	 *
	 * @param hql        HQL statement with named parameters
	 * @param fieldName  parameter names
	 * @param fieldValue parameter values, positionally matching {@code fieldName}
	 * @return matching entities
	 * @throws DAOException on persistence failure
	 */
	List<T> findByNamedParam(String hql,String[] fieldName,Object[] fieldValue) throws DAOException;

	/**
	 * Removes records matching all given column/value pairs (AND semantics).
	 *
	 * @param fieldName  column/property names
	 * @param fieldValue values, positionally matching {@code fieldName}
	 * @throws DAOException on persistence failure
	 */
	void removeByFields(String[] fieldName, Object[] fieldValue) throws DAOException;

	/**
	 * Executes a paged native SQL query with a separate count statement.
	 *
	 * @param querySQL    query returning the page rows
	 * @param countSql    query returning the total row count
	 * @param displayname display names for the result columns
	 * @param pageQuery   paging parameters; also receives the results
	 * @return the populated page query
	 * @throws DAOException on persistence failure
	 */
	PageQuery queryBySql(String querySQL,String countSql,String[] displayname,PageQuery pageQuery)throws DAOException;

	/**
	 * Executes a configured query; results are written back into the supplied page query.
	 *
	 * @param qs        query definition
	 * @param pageQuery paging parameters; also receives the results
	 * @throws DAOException on persistence failure
	 */
	void queryByParamter(QueryString qs, PageQuery pageQuery) throws DAOException;

	/**
	 * Executes an update/delete statement identified by a configured select id.
	 *
	 * @param pageQuery holds the select id and its parameters
	 * @return number of rows affected
	 * @throws DAOException on persistence failure
	 */
	int executeBySelectId(PageQuery pageQuery) throws DAOException;
}
/**
 * Copyright 2005-2015 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version
 * 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package io.fabric8.kubernetes.api.extensions;

import com.fasterxml.jackson.databind.ObjectMapper;
import io.fabric8.kubernetes.api.KubernetesHelper;
import io.fabric8.kubernetes.api.model.HasMetadata;
import io.fabric8.kubernetes.api.model.KubernetesList;
import io.fabric8.openshift.api.model.Parameter;
import io.fabric8.openshift.api.model.Template;
import io.fabric8.utils.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * Helper class for working with OpenShift Templates
 */
public class Templates {
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private static final transient Logger LOG = LoggerFactory.getLogger(Templates.class);

    /**
     * Allows a list of resources to be combined into a single Template if one or more templates are contained inside the list
     * or just return the unchanged list if no templates are present.
     *
     * @param kubernetesList the list whose items may contain templates
     * @return the combined {@link Template} if any template was found, otherwise the unchanged list
     */
    public static Object combineTemplates(KubernetesList kubernetesList) {
        Template firstTemplate = null;
        List<HasMetadata> items = kubernetesList.getItems();
        // First pass: merge all templates into the first one found.
        for (HasMetadata item : items) {
            if (item instanceof Template) {
                Template template = (Template) item;
                if (firstTemplate == null) {
                    firstTemplate = template;
                } else {
                    firstTemplate = combineTemplates(firstTemplate, template);
                }
            }
        }
        // Second pass: move every non-template resource into the combined template.
        if (firstTemplate != null) {
            for (HasMetadata object : items) {
                if (!(object instanceof Template)) {
                    addTemplateObject(firstTemplate, object);
                }
            }
        }
        return firstTemplate != null ? firstTemplate : kubernetesList;
    }

    /**
     * Merges {@code template} into {@code firstTemplate}: objects are appended, parameters are
     * combined, and fabric8 annotations not already present on the first template are copied over.
     *
     * @return {@code firstTemplate}, mutated in place
     */
    public static Template combineTemplates(Template firstTemplate, Template template) {
        List<HasMetadata> objects = template.getObjects();
        if (objects != null) {
            for (HasMetadata object : objects) {
                addTemplateObject(firstTemplate, object);
            }
        }
        List<Parameter> parameters = firstTemplate.getParameters();
        if (parameters == null) {
            parameters = new ArrayList<>();
            firstTemplate.setParameters(parameters);
        }
        combineParameters(parameters, template.getParameters());
        String name = KubernetesHelper.getName(template);
        if (Strings.isNotBlank(name)) {
            // lets merge all the fabric8 annotations using the template id qualifier as a postfix
            Map<String, String> annotations = KubernetesHelper.getOrCreateAnnotations(firstTemplate);
            Map<String, String> otherAnnotations = KubernetesHelper.getOrCreateAnnotations(template);
            Set<Map.Entry<String, String>> entries = otherAnnotations.entrySet();
            for (Map.Entry<String, String> entry : entries) {
                String key = entry.getKey();
                String value = entry.getValue();
                if (!annotations.containsKey(key)) {
                    annotations.put(key, value);
                }
            }
        }
        return firstTemplate;
    }

    /**
     * Merges {@code otherParameters} into {@code parameters} by name, adding unknown parameters.
     * NOTE(review): for an already-known parameter this overwrites the original's value only when
     * the ORIGINAL value is non-blank, which looks inverted (one would expect to fill in blanks);
     * confirm the intended merge semantics before changing.
     */
    protected static void combineParameters(List<Parameter> parameters, List<Parameter> otherParameters) {
        if (otherParameters != null && otherParameters.size() > 0) {
            Map<String, Parameter> map = new HashMap<>();
            for (Parameter parameter : parameters) {
                map.put(parameter.getName(), parameter);
            }
            for (Parameter otherParameter : otherParameters) {
                String name = otherParameter.getName();
                Parameter original = map.get(name);
                if (original == null) {
                    parameters.add(otherParameter);
                } else {
                    if (Strings.isNotBlank(original.getValue())) {
                        original.setValue(otherParameter.getValue());
                    }
                }
            }
        }
    }

    /**
     * Appends the given resource to the template's object list.
     * NOTE(review): assumes {@code template.getObjects()} is non-null — a template created without
     * an objects list would NPE here; verify against the model's default.
     */
    public static void addTemplateObject(Template template, HasMetadata object) {
        List<HasMetadata> objects = template.getObjects();
        objects.add(object);
        template.setObjects(objects);
    }

    /**
     * If we have any templates inside the items then lets unpack them and combine any parameters
     *
     * @return the combined {@link Template}, or the unchanged {@code kubernetesList} if no template is present
     */
    public static Object combineTemplates(KubernetesList kubernetesList, List<HasMetadata> items) {
        Template template = null;
        for (HasMetadata item : items) {
            if (item instanceof Template) {
                Template aTemplate = (Template) item;
                if (template == null) {
                    template = aTemplate;
                } else {
                    template = combineTemplates(template, aTemplate);
                }
            }
        }
        if (template != null) {
            // lets move all the content into the template
            for (HasMetadata item : items) {
                if (!(item instanceof Template)) {
                    addTemplateObject(template, item);
                }
            }
            // NOTE(review): this local is never used — dead code candidate for removal.
            List<HasMetadata> objects = template.getObjects();
            return template;
        } else {
            return kubernetesList;
        }
    }

    /**
     * Lets allow template parameters to be overridden with a Properties object
     *
     * @param template           the template whose parameters may be overridden
     * @param properties         property values keyed by {@code propertyNamePrefix + parameterName}
     * @param propertyNamePrefix prefix prepended to each parameter name when looking up properties
     */
    public static void overrideTemplateParameters(Template template, Map<String, String> properties, String propertyNamePrefix) {
        List<Parameter> parameters = template.getParameters();
        if (parameters != null && properties != null) {
            boolean missingProperty = false;
            for (Parameter parameter : parameters) {
                String parameterName = parameter.getName();
                String name = propertyNamePrefix + parameterName;
                String propertyValue = properties.get(name);
                if (Strings.isNotBlank(propertyValue)) {
                    LOG.info("Overriding template parameter " + name + " with value: " + propertyValue);
                    parameter.setValue(propertyValue);
                } else {
                    missingProperty = true;
                    LOG.info("No property defined for template parameter: " + name);
                }
            }
            if (missingProperty) {
                LOG.debug("current properties " + new TreeSet<>(properties.keySet()));
            }
        }
    }

    /**
     * Lets locally process the templates so that we can process templates on any kubernetes environment
     *
     * @param entity                       the template to expand locally; may be {@code null}
     * @param failOnMissingParameterValue  if {@code true}, a parameter without a value raises
     *                                     {@link IllegalArgumentException}; otherwise it expands to ""
     * @return the expanded {@link KubernetesList}, or {@code null} if the template has no objects
     * @throws IOException if the generated JSON cannot be parsed back into a list
     */
    public static KubernetesList processTemplatesLocally(Template entity, boolean failOnMissingParameterValue) throws IOException {
        List<HasMetadata> objects = null;
        if (entity != null) {
            objects = entity.getObjects();
            if (objects == null || objects.isEmpty()) {
                return null;
            }
        }
        List<Parameter> parameters = entity != null ? entity.getParameters() : null;
        if (parameters != null && !parameters.isEmpty()) {
            String json = "{\"kind\": \"List\", \"apiVersion\": \"" + KubernetesHelper.defaultApiVersion + "\",\n" + " \"items\": " + KubernetesHelper.toJson(objects) + " }";

            // lets make a few passes in case there's expressions in values
            for (int i = 0; i < 5; i++) {
                for (Parameter parameter : parameters) {
                    String name = parameter.getName();
                    // NOTE(review): named "regex" but used with replaceAllWithoutRegex — it is a
                    // literal "${name}" placeholder, not a regular expression.
                    String regex = "${" + name + "}";
                    String value = parameter.getValue();

                    // TODO generate random strings for passwords etc!
                    if (Strings.isNullOrBlank(value)) {
                        if (failOnMissingParameterValue) {
                            throw new IllegalArgumentException("No value available for parameter name: " + name);
                        } else {
                            value = "";
                        }
                    }
                    json = Strings.replaceAllWithoutRegex(json, regex, value);
                }
            }
            // NOTE(review): ObjectMapper.reader(Class) is deprecated in newer Jackson versions in
            // favor of readerFor(Class); left unchanged to avoid altering the runtime dependency
            // surface.
            return OBJECT_MAPPER.reader(KubernetesList.class).readValue(json);
        } else {
            KubernetesList answer = new KubernetesList();
            answer.setItems(objects);
            return answer;
        }
    }
}
package uk.co.sentinelweb.tvmod.microserver;

import android.support.annotation.NonNull;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringWriter;
import java.net.MalformedURLException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;

import jcifs.smb.SmbException;
import jcifs.smb.SmbFile;
import uk.co.sentinelweb.microserver.server.MimeMap;
import uk.co.sentinelweb.microserver.server.RequestData;
import uk.co.sentinelweb.microserver.server.cp.CommandProcessor;

/**
 * Streams via HTTP.
 * <p>
 * Serves SMB (jCIFS) files over HTTP under the "/s/" path prefix, supporting
 * HEAD requests, byte-range requests (Range header) and forced downloads
 * (the "dl" query parameter).
 */
public class SmbStreamCommandProcessor extends CommandProcessor {

    // Set by cancel() to make the streaming loop stop.
    private boolean close = false;

    // All in-flight requests; static, so shared across processor instances.
    static ArrayList<SmbStreamCommandProcessor> activeRequests = new ArrayList<>();

    OutputStream outputStream;

    public SmbStreamCommandProcessor() {
        super("/s/"); // this processor owns the /s/ path prefix
        // NOTE(review): inherited flags — presumably "emit our own headers" and
        // "new instance per request"; confirm semantics in CommandProcessor.
        _handleHeaders = true;
        singleton = false;
    }

    /**
     * Handles one request: resolves the SMB path from the URL, writes response
     * headers and (unless HEAD) streams the file body, honouring Range requests.
     * Always returns "" — the body is written directly to the output stream.
     */
    @Override
    public String processCommand(final RequestData req) {
        activeRequests.add(this);
        final HashMap<String, String> params = req.getParams();
        System.out.println("SmbStreamCP:url:" + req.getPath());
        // "dl" query parameter present => force an attachment download.
        final boolean dl = params.get("dl") != null;
        // Strip the "/s/" prefix (3 chars) and URL-decode into an smb:// path.
        final String smbpath = URLDecoder.decode(req.getPath().substring(3, req.getPath().length()));
        final MimeMap.MimeData mimeRec = MimeMap.get(smbpath);
        System.out.println("SmbStreamCP:pathdoecoded:" + smbpath);
        try {
            final SmbFile file = new SmbFile(smbpath);
            outputStream = req.getOutputStream();
            if (file == null || !file.exists()) {// not found
                // NOTE(review): 'file == null' can never be true after 'new'.
                if (mimeTypeIsNotMedia(mimeRec)) {// TODO send redirect to file
                    return return404();
                }
                return return404();
            } else {//process
                //req.getRequestProcessor().setName("SCP:" + path);
                System.out.println("StreamCP:>>>>>>>>>>>>>>new request >>>>>>>>>>>" + this.hashCode() + "(active:" + activeRequests.size() + ")" + " first:" + activeRequests.get(0).hashCode());
                System.out.println("StreamCP:method:" + req.getMethod());
                for (final String key : req.getHeaders().keySet()) {
                    System.out.println("StreamCP:hdr:" + key + "=" + req.getHeaders().get(key));
                }
                final long startTime = System.currentTimeMillis();
                int status = 200;
                final ArrayList<String> extraHeaders = new ArrayList<>();
                extraHeaders.add("HTTP/1.1 " + status + " " + (status == 200 ? "OK" : "Partial Content") + "\r\n");
                // NOTE(review): ETag is built from the path *length*, not content —
                // different files can easily share an ETag.
                final String etag = "" + file.getPath().length();
                extraHeaders.add("ETag:\"" + etag + "-" + file.lastModified() + "\"" + "\r\n");
                final String connHeader = req.getHeaders().get("connection");
                //if (connHeader==null) {connHeader = req.getHeaders().get("connection");}
                if (connHeader != null && connHeader.toLowerCase().trim().indexOf("keep-alive") == 0) {
                    extraHeaders.add("Keep-Alive: timeout=15, max=100" + "\r\n");
                    extraHeaders.add("Connection: Keep-Alive" + "\r\n");
                } else if (connHeader != null && connHeader.toLowerCase().trim().indexOf("close") == 0) {
                    extraHeaders.add("Connection: close" + "\r\n");
                }
                long contlen = file.length();
                String type = mimeRec.mimeType;
                if (req.isHead()) {
                    // HEAD: emit headers only, no body.
                    extraHeaders.add("Date: Thu, 18 Feb 2010 17:01:17 GMT\r\n"); // NOTE(review): hard-coded Date header
                    extraHeaders.add("Server: MyPOD Android" + "\r\n");
                    extraHeaders.add("Last-Modified:" + new Date().toGMTString() + "\r\n");
                    extraHeaders.add("Content-Length:" + contlen + "\r\n");
                    if (dl) {
                        type = "application/x-octet-stream";
                        extraHeaders.add("Content-Type:" + type + "\r\n");
                    } else {
                        extraHeaders.add("Content-Type:" + type + "\r\n");
                    }
                    writeHeaders(outputStream, extraHeaders);
                } else {
                    long start = 0;
                    long end = file.length();
                    String rangeHeader = req.getHeaders().get("range");//Range: bytes=7864320-7999999
                    if (connHeader != null && connHeader.toLowerCase().trim().indexOf("close") == 0) {
                        System.out.println(this.hashCode() + "(" + activeRequests.size() + ")" + "close req");
                        if (activeRequests.size() > 1) {
                            System.out.println("StreamCP:" + this.hashCode() + "(" + activeRequests.size() + ")" + "closing " + activeRequests.get(0).hashCode());
                            //activeRequests.get(0).cancel();
                            //end = 100;
                        }
                    } else if (rangeHeader != null && !dl) {// assumed to be in bytes
                        // range header parsing: "bytes=start-end"; an open-ended
                        // range ("start-") runs to the last byte of the file.
                        status = 206;
                        rangeHeader = rangeHeader.trim();
                        final String[] eqSplit = rangeHeader.split("=");
                        if (eqSplit.length == 2) {
                            final String[] dashSplit = eqSplit[1].split("-");
                            if (dashSplit.length > 0) {
                                try {
                                    start = Long.parseLong(dashSplit[0]);
                                    if (dashSplit.length > 1) {
                                        end = Long.parseLong(dashSplit[1]);
                                    } else {
                                        end = file.length() - 1;//last byte
                                    }
                                    contlen = end - start + 1;
                                } catch (final NumberFormatException e) {
                                    // Unparseable range: fall back to the full file (status stays 206).
                                    System.out.println("-" + this.hashCode() + "StreamCP: couln't parse Range header '" + rangeHeader + "'");
                                }
                            }
                        }
                    }
                    // Rebuild the header list now that status/range are known.
                    extraHeaders.clear();
                    if (dl) {
                        // Forced download: always a full 200 response with attachment disposition.
                        extraHeaders.add("HTTP/1.1 200 OK" + "\r\n");
                        type = "application/octet-stream";
                        extraHeaders.add("Content-Type:" + type + "\r\n");
                        final String fileName = file.getName();
                        final String ua = req.getHeaders().get("user-agent");
                        if (ua != null && ua.toLowerCase().indexOf("playstation") > -1) {
                            // PlayStation clients: deliberately omit Content-Length.
                        } else {
                            extraHeaders.add("Content-Length:" + contlen + "\r\n");
                        }
                        System.out.println("StreamCP:filename:" + fileName);
                        extraHeaders.add("Content-disposition:attachment; filename=" + fileName + "\r\n");
                    } else {
                        extraHeaders.add("HTTP/1.1 " + status + " " + (status == 200 ? "OK" : "Partial Content") + "\r\n");
                        extraHeaders.add("ETag: \"" + etag + "-" + file.lastModified() + "\"" + "\r\n");
                        extraHeaders.add("Date: " + (new Date()).toGMTString() + "\r\n");
                        extraHeaders.add("Server: MyPOD Android" + "\r\n");
                        extraHeaders.add("Last-Modified: " + new Date(file.lastModified()).toGMTString() + "\r\n");
                        extraHeaders.add("Accept-Ranges:bytes" + "\r\n");
                        extraHeaders.add("Content-Length: " + contlen + "\r\n");
                        extraHeaders.add("Content-Type: " + type + "\r\n");
                        extraHeaders.add("Connection: close\r\n");
                        if (rangeHeader != null) {
                            extraHeaders.add("Content-Range:bytes " + start + "-" + end + "/" + (file.length()) + "\r\n");
                        }
                    }
                    writeHeaders(outputStream, extraHeaders);
                    // set thread name for testing
                    //getRequestProcessor().setName("SCP:" + file.getName() + ":" + status);
                    // write stream
                    final long openTime = System.currentTimeMillis();
                    final int buffSize = 100000;
                    final byte[] b = new byte[buffSize];
                    long pos = start;
                    long written = 0;
                    int bytesRead = 0;
                    final InputStream is = file.getInputStream();//new FileInputStream(file);
                    if (start > 0) {
                        is.skip(start);
                    }
                    System.out.println("StreamCP:start writing out data:" + this.hashCode() + "(" + activeRequests.size() + ")" + "filesize=" + file.length() + ": start:" + start + ": en:" + end + " status:" + status);
                    try {
                        // 'cancel' is presumably an inherited stop flag — TODO confirm in CommandProcessor.
                        while ((bytesRead = is.read(b, 0, buffSize)) > -1 && !close && !cancel) {
                            int bytesToWrite = bytesRead;
                            if (end - pos < bytesRead) {
                                // Truncate the final chunk so nothing past 'end' is written.
                                bytesToWrite = (int) (end - pos + 1);
                            }
                            outputStream.write(b, 0, bytesToWrite);
                            pos += bytesToWrite;
                            written += bytesToWrite;
                            outputStream.flush();
                            if (pos > end) {
                                break;
                            }
                        }
                    } catch (final Exception e) {
                        // Client disconnects land here; log and fall through to cleanup.
                        System.err.println("StreamCP:" + this.hashCode() + "(" + activeRequests.size() + ")" + e.getMessage());
                        e.printStackTrace(System.err);
                    }
                    final long endTime = System.currentTimeMillis();
                    System.out.println("StreamCP:" + this.hashCode() + "(" + activeRequests.size() + ")" + "finished... pos:" + pos + " written:" + written + " opentime:" + (openTime - startTime) + ": serveTime:" + (endTime - openTime));
                    // NOTE(review): 'is' is not closed when the header/skip code above
                    // throws — consider try-with-resources / finally.
                    is.close();
                }
            }
        } catch (final MalformedURLException e) {
            e.printStackTrace();
        } catch (final SmbException e) {
            e.printStackTrace();
        } catch (final FileNotFoundException e) {
            System.err.println("StreamCP:" + this.hashCode() + "(" + activeRequests.size() + ")" + e.getMessage());
            e.printStackTrace(System.err);
        } catch (final IOException e) {
            System.err.println("StreamCP:" + this.hashCode() + "(" + activeRequests.size() + ")" + e.getMessage());
            e.printStackTrace(System.err);
        }
        System.out.println("StreamCP: reqmove active:" + this.hashCode() + "(" + activeRequests.size() + ")");
        activeRequests.remove(this);
        return "";
    }

    /** True when the resolved mime type is absent or not an audio/video media type. */
    public boolean mimeTypeIsNotMedia(final MimeMap.MimeData mimeRec) {
        return mimeRec == null || !(mimeRec.mediaType.equals(MimeMap.VIDEO) || mimeRec.mediaType.equals(MimeMap.AUDIO_VIDEO) || mimeRec.mediaType.equals(MimeMap.AUDIO));
    }

    /** Writes a 404 status line, deregisters this request and returns "". */
    @NonNull
    public String return404() {
        final ArrayList<String> extraHeaders = new ArrayList<>();
        extraHeaders.add("HTTP/1.1 " + 404 + " file not found" + "\r\n");
        try {
            writeHeaders(outputStream, extraHeaders);
        } catch (final IOException e) {
            e.printStackTrace();
        }
        activeRequests.remove(this);
        return "";
    }

    /** Writes the given header lines plus the blank end-of-headers line, then flushes. */
    private void writeHeaders(final OutputStream out, final ArrayList<String> extra) throws IOException {
        final StringWriter sw = new StringWriter();
        for (final String extr : extra) {
            sw.write(extr);
            System.out.println("StreamCP:outheader:" + extr);
        }
        sw.write("\r\n"); // blank line terminates the header block
        sw.flush();
        out.write(sw.toString().getBytes());
        sw.close();
        out.flush();
    }

    @Override
    public void release() {
        // TODO Auto-generated method stub
    }

    /** Force-closes the client stream and request processor, then flags the copy loop to stop. */
    public void cancel() {
        try {
            outputStream.close();
            getRequestProcessor().close();
            System.out.println("StreamCP: closed:" + this.hashCode() + "(" + activeRequests.size() + ")");
        } catch (final IOException e) {
            System.err.println("StreamCP: cancel:" + this.hashCode() + "(" + activeRequests.size() + ")" + e.getMessage());
            e.printStackTrace(System.err);
        }
        close = true;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.core.protocol.core.impl.wireformat;

import java.util.Objects;

import org.apache.activemq.artemis.api.core.ActiveMQBuffer;
import org.apache.activemq.artemis.api.core.TransportConfiguration;
import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl;

/**
 * Packet with which a cluster node announces its identity, whether it is a
 * live or backup node, and its (optional) live and backup connectors.
 */
public class NodeAnnounceMessage extends PacketImpl {

   protected String nodeID;

   protected String backupGroupName;

   protected boolean backup;

   protected long currentEventID;

   // Both connectors are nullable; encodeRest writes a presence marker for each.
   protected TransportConfiguration connector;

   protected TransportConfiguration backupConnector;

   private String scaleDownGroupName;

   // Static --------------------------------------------------------

   // Constructors --------------------------------------------------

   /**
    * @param currentEventID     event counter at announce time
    * @param nodeID             announcing node's id (required by encodeRest)
    * @param backupGroupName    may be null
    * @param scaleDownGroupName may be null
    * @param backup             true when the announcing node is a backup
    * @param tc                 the node's connector, may be null
    * @param backupConnector    the node's backup connector, may be null
    */
   public NodeAnnounceMessage(final long currentEventID,
                              final String nodeID,
                              final String backupGroupName,
                              final String scaleDownGroupName,
                              final boolean backup,
                              final TransportConfiguration tc,
                              final TransportConfiguration backupConnector) {
      super(NODE_ANNOUNCE);
      this.currentEventID = currentEventID;
      this.nodeID = nodeID;
      this.backupGroupName = backupGroupName;
      this.backup = backup;
      this.connector = tc;
      this.backupConnector = backupConnector;
      this.scaleDownGroupName = scaleDownGroupName;
   }

   /** Used on the decode path; fields are filled in later by {@link #decodeRest}. */
   public NodeAnnounceMessage() {
      super(NODE_ANNOUNCE);
   }

   /** Allows subclasses (e.g. a V2 message) to use a different packet type byte. */
   public NodeAnnounceMessage(byte nodeAnnounceMessage_V2) {
      super(nodeAnnounceMessage_V2);
   }

   // Public --------------------------------------------------------

   public String getNodeID() {
      return nodeID;
   }

   public String getBackupGroupName() {
      return backupGroupName;
   }

   public boolean isBackup() {
      return backup;
   }

   public TransportConfiguration getConnector() {
      return connector;
   }

   public TransportConfiguration getBackupConnector() {
      return backupConnector;
   }

   public String getScaleDownGroupName() {
      return scaleDownGroupName;
   }

   /**
    * @return the currentEventID
    */
   public long getCurrentEventID() {
      return currentEventID;
   }

   @Override
   public void encodeRest(final ActiveMQBuffer buffer) {
      buffer.writeString(nodeID);
      buffer.writeNullableString(backupGroupName);
      buffer.writeBoolean(backup);
      buffer.writeLong(currentEventID);
      // Each nullable connector is preceded by a boolean presence marker.
      if (connector != null) {
         buffer.writeBoolean(true);
         connector.encode(buffer);
      } else {
         buffer.writeBoolean(false);
      }
      if (backupConnector != null) {
         buffer.writeBoolean(true);
         backupConnector.encode(buffer);
      } else {
         buffer.writeBoolean(false);
      }
      buffer.writeNullableString(scaleDownGroupName);
   }

   @Override
   public void decodeRest(final ActiveMQBuffer buffer) {
      this.nodeID = buffer.readString();
      this.backupGroupName = buffer.readNullableString();
      this.backup = buffer.readBoolean();
      this.currentEventID = buffer.readLong();
      if (buffer.readBoolean()) {
         connector = new TransportConfiguration();
         connector.decode(buffer);
      }
      if (buffer.readBoolean()) {
         backupConnector = new TransportConfiguration();
         backupConnector.decode(buffer);
      }
      scaleDownGroupName = buffer.readNullableString();
   }

   @Override
   public String toString() {
      return "NodeAnnounceMessage [backup=" + backup +
         ", connector=" + connector +
         ", nodeID=" + nodeID +
         ", toString()=" + super.toString() + "]";
   }

   @Override
   public int hashCode() {
      final int prime = 31;
      int result = super.hashCode();
      result = prime * result + (backup ? 1231 : 1237);
      result = prime * result + ((backupConnector == null) ? 0 : backupConnector.hashCode());
      result = prime * result + ((connector == null) ? 0 : connector.hashCode());
      result = prime * result + (int) (currentEventID ^ (currentEventID >>> 32));
      result = prime * result + ((nodeID == null) ? 0 : nodeID.hashCode());
      result = prime * result + ((scaleDownGroupName == null) ? 0 : scaleDownGroupName.hashCode());
      return result;
   }

   /**
    * FIX: the original attached the scaleDownGroupName comparison to the
    * nodeID else-if chain, so it was skipped entirely when both nodeIDs were
    * null and threw NullPointerException when this.scaleDownGroupName was
    * null. All nullable fields are now compared null-safely, consistent with
    * {@link #hashCode()}.
    */
   @Override
   public boolean equals(Object obj) {
      if (this == obj) {
         return true;
      }
      if (!super.equals(obj)) {
         return false;
      }
      if (!(obj instanceof NodeAnnounceMessage)) {
         return false;
      }
      NodeAnnounceMessage other = (NodeAnnounceMessage) obj;
      if (backup != other.backup) {
         return false;
      }
      if (currentEventID != other.currentEventID) {
         return false;
      }
      return Objects.equals(backupConnector, other.backupConnector) &&
         Objects.equals(connector, other.connector) &&
         Objects.equals(nodeID, other.nodeID) &&
         Objects.equals(scaleDownGroupName, other.scaleDownGroupName);
   }
}
package com.jaquadro.minecraft.gardentrees.block;

import com.jaquadro.minecraft.gardencore.api.WoodRegistry;
import com.jaquadro.minecraft.gardenapi.api.connect.IChainSingleAttachable;
import com.jaquadro.minecraft.gardencore.util.UniqueMetaIdentifier;
import com.jaquadro.minecraft.gardentrees.block.tile.TileEntityWoodProxy;
import com.jaquadro.minecraft.gardentrees.core.ClientProxy;
import com.jaquadro.minecraft.gardentrees.core.ModBlocks;
import com.jaquadro.minecraft.gardentrees.core.ModCreativeTabs;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.BlockContainer;
import net.minecraft.block.BlockLeavesBase;
import net.minecraft.block.BlockTorch;
import net.minecraft.block.material.Material;
import net.minecraft.client.particle.EffectRenderer;
import net.minecraft.client.particle.EntityDiggingFX;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.util.IIcon;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.util.Vec3;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;

import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Random;

/**
 * Thin log post block (Minecraft 1.7 / Forge). Its bounds shrink by
 * {@link #getMargin()} on each unconnected face, it proxies its texture
 * through a {@link TileEntityWoodProxy} so it can mimic any registered wood
 * type, and it exposes chain attachment points on unconnected faces.
 *
 * Connection bit flags used throughout (see calcConnectionFlags):
 * 1 = down, 2 = up, 4 = -Z, 8 = +Z, 16 = -X, 32 = +X.
 */
public class BlockThinLog extends BlockContainer implements IChainSingleAttachable {
    // Vanilla wood species addressed directly via metadata 0-5.
    public static final String[] subNames = new String[] { "oak", "spruce", "birch", "jungle", "acacia", "big_oak" };

    // Scratch state variable for rendering purposes
    // 0 = Y, 1 = Z, 2 = X, 3 = BARK
    private int orientation;

    public BlockThinLog (String blockName) {
        super(Material.wood);

        setCreativeTab(ModCreativeTabs.tabGardenTrees);
        setHardness(1.5f);
        setResistance(5f);
        setLightOpacity(0);
        setStepSound(Block.soundTypeWood);
        setBlockName(blockName);
        setBlockBoundsForItemRender();
    }

    /** Inset of the post from each side face; the post occupies the middle half. */
    public float getMargin () {
        return 0.25f;
    }

    /** Sets the render-time orientation scratch value (see field comment). */
    public void setOrientation (int orientation) {
        this.orientation = orientation;
    }

    @Override
    public void setBlockBoundsForItemRender () {
        float margin = getMargin();
        setBlockBounds(margin, 0, margin, 1 - margin, 1, 1 - margin);
    }

    /** Collision box: each face extends to the block edge only when connected on that side. */
    @Override
    public void addCollisionBoxesToList (World world, int x, int y, int z, AxisAlignedBB mask, List list, Entity colliding) {
        int connectFlags = calcConnectionFlags(world, x, y, z);
        float margin = getMargin();
        float ys = (connectFlags & 1) != 0 ? 0 : margin;
        float ye = (connectFlags & 2) != 0 ? 1 : 1 - margin;
        float zs = (connectFlags & 4) != 0 ? 0 : margin;
        float ze = (connectFlags & 8) != 0 ? 1 : 1 - margin;
        float xs = (connectFlags & 16) != 0 ? 0 : margin;
        float xe = (connectFlags & 32) != 0 ? 1 : 1 - margin;

        setBlockBounds(xs, ys, zs, xe, ye, ze);
        super.addCollisionBoxesToList(world, x, y, z, mask, list, colliding);
    }

    /** Same bounds logic as the collision box, applied to the selection bounds. */
    @Override
    public void setBlockBoundsBasedOnState (IBlockAccess world, int x, int y, int z) {
        int connectFlags = calcConnectionFlags(world, x, y, z);
        float margin = getMargin();
        float ys = (connectFlags & 1) != 0 ? 0 : margin;
        float ye = (connectFlags & 2) != 0 ? 1 : 1 - margin;
        float zs = (connectFlags & 4) != 0 ? 0 : margin;
        float ze = (connectFlags & 8) != 0 ? 1 : 1 - margin;
        float xs = (connectFlags & 16) != 0 ? 0 : margin;
        float xe = (connectFlags & 32) != 0 ? 1 : 1 - margin;

        setBlockBounds(xs, ys, zs, xe, ye, ze);
    }

    @Override
    public int quantityDropped (Random random) {
        return 1;
    }

    @Override
    public boolean isOpaqueCube () {
        return false;
    }

    @Override
    public boolean renderAsNormalBlock () {
        return false;
    }

    @Override
    public boolean canPlaceTorchOnTop (World world, int x, int y, int z) {
        return true;
    }

    @Override
    public int getRenderType () {
        return ClientProxy.thinLogRenderID;
    }

    @Override
    public boolean shouldSideBeRendered (IBlockAccess blockAccess, int x, int y, int z, int side) {
        return true;
    }

    /** Triggers leaf decay within a 4-block radius, mirroring vanilla log removal. */
    @Override
    public void breakBlock (World world, int x, int y, int z, Block block, int meta) {
        byte range = 4;
        int height = range + 1;
        if (world.checkChunksExist(x - height, y - height, z - height, x + height, y + height, z + height)) {
            for (int dx = -range; dx <= range; dx++) {
                for (int dy = -range; dy <= range; dy++) {
                    for (int dz = -range; dz <= range; dz++) {
                        Block leaf = world.getBlock(x + dx, y + dy, z + dz);
                        if (leaf.isLeaves(world, x + dx, y + dy, z + dz))
                            leaf.beginLeavesDecay(world, x + dx, y + dy, z + dz);
                    }
                }
            }
        }

        super.breakBlock(world, x, y, z, block, meta);
    }

    // Keep the block (and thus its tile entity) alive until harvestBlock runs,
    // so getDrops can still read the wood-proxy data; the block is cleared there.
    @Override
    public boolean removedByPlayer (World world, EntityPlayer player, int x, int y, int z, boolean willHarvest) {
        if (willHarvest)
            return true;

        return super.removedByPlayer(world, player, x, y, z, willHarvest);
    }

    @Override
    public void harvestBlock (World world, EntityPlayer player, int x, int y, int z, int meta) {
        super.harvestBlock(world, player, x, y, z, meta);
        world.setBlockToAir(x, y, z);
    }

    @Override
    public int damageDropped (int meta) {
        return meta;
    }

    /** Drops use the proxy block's composed metadata when a wood proxy tile is present. */
    @Override
    public ArrayList<ItemStack> getDrops (World world, int x, int y, int z, int metadata, int fortune) {
        TileEntityWoodProxy tile = getTileEntity(world, x, y, z);

        ArrayList<ItemStack> ret = new ArrayList<ItemStack>();
        int count = quantityDropped(metadata, fortune, world.rand);
        for (int i = 0; i < count; i++) {
            Item item = getItemDropped(metadata, world.rand, fortune);
            if (item != null) {
                int damage = damageDropped(metadata);
                if (tile != null && tile.getProtoBlock() != null)
                    damage = TileEntityWoodProxy.composeMetadata(tile.getProtoBlock(), tile.getProtoMeta());

                ItemStack stack = new ItemStack(item, 1, damage);
                ret.add(stack);
            }
        }

        return ret;
    }

    /**
     * Computes this post's connection flags (bit layout in the class comment)
     * from its vertical connectivity and the four horizontal neighbors.
     */
    public int calcConnectionFlags (IBlockAccess world, int x, int y, int z) {
        int flagsY = calcConnectYFlags(world, x, y, z);
        int flagsZNeg = calcConnectYFlags(world, x, y, z - 1);
        int flagsZPos = calcConnectYFlags(world, x, y, z + 1);
        int flagsXNeg = calcConnectYFlags(world, x - 1, y, z);
        int flagsXPos = calcConnectYFlags(world, x + 1, y, z);

        // Low two bits = down/up connectivity of each column.
        int connectFlagsY = flagsY & 3;
        int connectFlagsZNeg = flagsZNeg & 3;
        int connectFlagsZPos = flagsZPos & 3;
        int connectFlagsXNeg = flagsXNeg & 3;
        int connectFlagsXPos = flagsXPos & 3;

        Block blockZNeg = world.getBlock(x, y, z - 1);
        Block blockZPos = world.getBlock(x, y, z + 1);
        Block blockXNeg = world.getBlock(x - 1, y, z);
        Block blockXPos = world.getBlock(x + 1, y, z);

        // "Hard" = solid/opaque neighbor (or a torch) this post can butt against.
        boolean hardZNeg = isNeighborHardConnection(world, x, y, z - 1, blockZNeg, ForgeDirection.NORTH) || blockZNeg instanceof BlockTorch;
        boolean hardZPos = isNeighborHardConnection(world, x, y, z + 1, blockZPos, ForgeDirection.SOUTH) || blockZPos instanceof BlockTorch;
        boolean hardXNeg = isNeighborHardConnection(world, x - 1, y, z, blockXNeg, ForgeDirection.WEST) || blockXNeg instanceof BlockTorch;
        boolean hardXPos = isNeighborHardConnection(world, x + 1, y, z, blockXPos, ForgeDirection.EAST) || blockXPos instanceof BlockTorch;

        boolean hardConnection = (flagsY & 4) != 0;
        boolean hardConnectionZNeg = hardConnection && (flagsZNeg & 4) != 0;
        boolean hardConnectionZPos = hardConnection && (flagsZPos & 4) != 0;
        boolean hardConnectionXNeg = hardConnection && (flagsXNeg & 4) != 0;
        boolean hardConnectionXPos = hardConnection && (flagsXPos & 4) != 0;

        boolean connectZNeg = (connectFlagsY == 0 && hardZNeg) || (blockZNeg == this && !hardConnectionZNeg && (connectFlagsY != 3 || connectFlagsZNeg != 3));
        boolean connectZPos = (connectFlagsY == 0 && hardZPos) || (blockZPos == this && !hardConnectionZPos && (connectFlagsY != 3 || connectFlagsZPos != 3));
        boolean connectXNeg = (connectFlagsY == 0 && hardXNeg) || (blockXNeg == this && !hardConnectionXNeg && (connectFlagsY != 3 || connectFlagsXNeg != 3));
        boolean connectXPos = (connectFlagsY == 0 && hardXPos) || (blockXPos == this && !hardConnectionXPos && (connectFlagsY != 3 || connectFlagsXPos != 3));

        boolean connectSide = connectZNeg | connectZPos | connectXNeg | connectXPos;

        // Free-standing post with no downward link: latch onto any hard neighbor.
        if (!connectSide && (connectFlagsY & 1) == 0) {
            if (hardZNeg)
                connectZNeg = true;
            if (hardZPos)
                connectZPos = true;
            if (hardXNeg)
                connectXNeg = true;
            if (hardXPos)
                connectXPos = true;
        }

        // Fully isolated horizontally: render as a straight vertical post.
        if (!(connectZNeg | connectZPos | connectXNeg | connectXPos))
            connectFlagsY = 3;

        if (connectFlagsY == 2 && hardZNeg)
            connectZNeg = true;
        if (connectFlagsY == 2 && hardZPos)
            connectZPos = true;
        if (connectFlagsY == 2 && hardXNeg)
            connectXNeg = true;
        if (connectFlagsY == 2 && hardXPos)
            connectXPos = true;

        return connectFlagsY | (connectZNeg ? 4 : 0) | (connectZPos ? 8 : 0) | (connectXNeg ? 16 : 0) | (connectXPos ? 32 : 0);
    }

    /**
     * Vertical connectivity of the post at (x,y,z):
     * bit 1 = connects down, 2 = connects up, 4 = hard support below, 8 = hard support above.
     * Returns 0 when the position does not hold this block.
     */
    private int calcConnectYFlags (IBlockAccess world, int x, int y, int z) {
        Block block = world.getBlock(x, y, z);
        if (block != this)
            return 0;

        Block blockYNeg = world.getBlock(x, y - 1, z);
        boolean hardYNeg = isNeighborHardConnectionY(world, x, y - 1, z, blockYNeg, ForgeDirection.DOWN);
        boolean connectYNeg = hardYNeg || blockYNeg == this;

        Block blockYPos = world.getBlock(x, y + 1, z);
        boolean hardYPos = isNeighborHardConnectionY(world, x, y + 1, z, blockYPos, ForgeDirection.UP);
        boolean connectYPos = hardYPos || blockYPos == this || blockYPos instanceof BlockTorch;

        return (connectYNeg ? 1 : 0) | (connectYPos ? 2 : 0) | (hardYNeg ? 4 : 0) | (hardYPos ? 8 : 0);
    }

    /** A neighbor is "hard" when it is a full opaque cube or its facing side is solid. */
    private boolean isNeighborHardConnection (IBlockAccess world, int x, int y, int z, Block block, ForgeDirection side) {
        if (block.getMaterial().isOpaque() && block.renderAsNormalBlock())
            return true;
        if (block.isSideSolid(world, x, y, z, side.getOpposite()))
            return true;

        //if (block == ModBlocks.largePot)
        //    return true;

        return false;
    }

    /** Vertical variant: additionally treats leaves and the thin-log fence as hard. */
    private boolean isNeighborHardConnectionY (IBlockAccess world, int x, int y, int z, Block block, ForgeDirection side) {
        if (isNeighborHardConnection(world, x, y, z, block, side))
            return true;

        return block instanceof BlockLeavesBase || block == ModBlocks.thinLogFence;
    }

    /** Creative tab entries: the six vanilla species, plus every registered non-vanilla wood type. */
    @Override
    public void getSubBlocks (Item item, CreativeTabs creativeTabs, List blockList) {
        for (int i = 0; i < 6; i++)
            blockList.add(new ItemStack(item, 1, i));

        for (Entry<UniqueMetaIdentifier, Block> entry : WoodRegistry.instance().registeredTypes()) {
            if (entry.getValue() == Blocks.log || entry.getValue() == Blocks.log2)
                continue;

            int id = TileEntityWoodProxy.composeMetadata(entry.getValue(), entry.getKey().meta);
            blockList.add(new ItemStack(item, 1, id));
        }
    }

    /** Item-form icon: delegates to the proto block encoded in the composed metadata. */
    @SideOnly(Side.CLIENT)
    @Override
    public IIcon getIcon (int side, int meta) {
        // Fold the render orientation into the vanilla log axis metadata bits.
        int ometa = 0;
        if (orientation == 1)
            ometa |= 8;
        else if (orientation == 2)
            ometa |= 4;
        else if (orientation == 3)
            ometa |= 12;

        int protoMeta = TileEntityWoodProxy.getMetaFromComposedMetadata(meta);
        Block protoBlock = TileEntityWoodProxy.getBlockFromComposedMetadata(meta);
        if (protoBlock == null)
            protoBlock = getIconSource(meta);

        return protoBlock.getIcon(side, protoMeta | ometa);
    }

    /** In-world icon: reads the proto block from the wood-proxy tile entity. */
    @SideOnly(Side.CLIENT)
    @Override
    public IIcon getIcon (IBlockAccess blockAccess, int x, int y, int z, int side) {
        TileEntityWoodProxy te = getTileEntity(blockAccess, x, y, z);
        if (te == null || te.getProtoBlock() == null)
            return super.getIcon(blockAccess, x, y, z, side);

        int ometa = 0;
        if (orientation == 1)
            ometa |= 8;
        else if (orientation == 2)
            ometa |= 4;
        else if (orientation == 3)
            ometa |= 12;

        int protoMeta = te.getProtoMeta();
        Block protoBlock = te.getProtoBlock();
        if (protoBlock == null)
            protoBlock = Blocks.log;

        return protoBlock.getIcon(side, protoMeta | ometa);
    }

    /** Vanilla fallback texture source for metadata 0-5 (log covers 0-3, log2 4-5). */
    private Block getIconSource (int meta) {
        switch (meta / 4) {
            case 0:
                return Blocks.log;
            case 1:
                return Blocks.log2;
            default:
                return Blocks.log;
        }
    }

    /** Spawns the block-hit particle using the proxied wood texture. */
    @SideOnly(Side.CLIENT)
    @Override
    public boolean addHitEffects (World worldObj, MovingObjectPosition target, EffectRenderer effectRenderer) {
        TileEntityWoodProxy te = getTileEntity(worldObj, target.blockX, target.blockY, target.blockZ);
        BlockThinLog block = getBlock(worldObj, target.blockX, target.blockY, target.blockZ);
        if (te == null || block == null)
            return false;

        int protoMeta = te.getProtoMeta();
        Block protoBlock = te.getProtoBlock();
        if (protoBlock == null) {
            protoBlock = Blocks.log;
            protoMeta = worldObj.getBlockMetadata(target.blockX, target.blockY, target.blockZ);
        }

        // Pick a random point on the struck face, inset by f, mirroring vanilla Block behavior.
        float f = 0.1F;
        double xPos = target.blockX + worldObj.rand.nextDouble() * (block.getBlockBoundsMaxX() - block.getBlockBoundsMinX() - (f * 2.0F)) + f + block.getBlockBoundsMinX();
        double yPos = target.blockY + worldObj.rand.nextDouble() * (block.getBlockBoundsMaxY() - block.getBlockBoundsMinY() - (f * 2.0F)) + f + block.getBlockBoundsMinY();
        double zPos = target.blockZ + worldObj.rand.nextDouble() * (block.getBlockBoundsMaxZ() - block.getBlockBoundsMinZ() - (f * 2.0F)) + f + block.getBlockBoundsMinZ();

        if (target.sideHit == 0)
            yPos = target.blockY + block.getBlockBoundsMinY() - f;
        if (target.sideHit == 1)
            yPos = target.blockY + block.getBlockBoundsMaxY() + f;
        if (target.sideHit == 2)
            zPos = target.blockZ + block.getBlockBoundsMinZ() - f;
        if (target.sideHit == 3)
            zPos = target.blockZ + block.getBlockBoundsMaxZ() + f;
        if (target.sideHit == 4)
            xPos = target.blockX + block.getBlockBoundsMinX() - f;
        if (target.sideHit == 5)
            xPos = target.blockX + block.getBlockBoundsMaxX() + f;

        EntityDiggingFX fx = new EntityDiggingFX(worldObj, xPos, yPos, zPos, 0.0D, 0.0D, 0.0D, block, worldObj.getBlockMetadata(target.blockX, target.blockY, target.blockZ));
        fx.applyColourMultiplier(target.blockX, target.blockY, target.blockZ);
        fx.multiplyVelocity(0.2F).multipleParticleScaleBy(0.6F);
        // NOTE(review): composeMetadata is invoked on the instance here but
        // statically elsewhere — presumably a static method; confirm.
        fx.setParticleIcon(block.getIcon(worldObj.rand.nextInt(6), te.composeMetadata(protoBlock, protoMeta)));
        effectRenderer.addEffect(fx);
        return true;
    }

    /** Spawns the 4x4x4 break-particle cloud using the proxied wood texture. */
    @SideOnly(Side.CLIENT)
    @Override
    public boolean addDestroyEffects (World world, int x, int y, int z, int meta, EffectRenderer effectRenderer) {
        TileEntityWoodProxy te = getTileEntity(world, x, y, z);
        BlockThinLog block = getBlock(world, x, y, z);
        if (te == null || block == null)
            return false;

        int protoMeta = te.getProtoMeta();
        Block protoBlock = te.getProtoBlock();
        if (protoBlock == null) {
            protoBlock = Blocks.log;
            protoMeta = world.getBlockMetadata(x, y, z);
        }

        try {
            byte count = 4;
            for (int ix = 0; ix < count; ++ix) {
                for (int iy = 0; iy < count; ++iy) {
                    for (int iz = 0; iz < count; ++iz) {
                        double xOff = (double) x + ((double) ix + 0.5D) / (double) count;
                        double yOff = (double) y + ((double) iy + 0.5D) / (double) count;
                        double zOff = (double) z + ((double) iz + 0.5D) / (double) count;

                        EntityDiggingFX fx = new EntityDiggingFX(world, xOff, yOff, zOff, xOff - (double) x - 0.5D, yOff - (double) y - 0.5D, zOff - (double) z - 0.5D, this, meta);
                        fx.setParticleIcon(block.getIcon(world.rand.nextInt(6), te.composeMetadata(protoBlock, protoMeta)));
                        effectRenderer.addEffect(fx.applyColourMultiplier(x, y, z));
                    }
                }
            }
        } catch (Exception e) {
            // Particle effects are cosmetic only; swallow render-side failures.
        }

        return true;
    }

    /** @return the wood-proxy tile entity at the position, or null when absent/of another type. */
    private TileEntityWoodProxy getTileEntity (IBlockAccess blockAccess, int x, int y, int z) {
        TileEntity te = blockAccess.getTileEntity(x, y, z);
        if (te != null && te instanceof TileEntityWoodProxy)
            return (TileEntityWoodProxy) te;
        return null;
    }

    /** @return this block type at the position, or null when the position holds something else. */
    private BlockThinLog getBlock (IBlockAccess blockAccess, int x, int y, int z) {
        Block block = blockAccess.getBlock(x, y, z);
        if (block != null && block instanceof BlockThinLog)
            return (BlockThinLog) block;
        return null;
    }

    @Override
    public boolean canSustainLeaves (IBlockAccess world, int x, int y, int z) {
        return true;
    }

    @Override
    public TileEntity createNewTileEntity (World world, int meta) {
        return new TileEntityWoodProxy();
    }

    // Centre of each face, inset by the margin: indices 0-5 = down, up, -Z, +Z, -X, +X.
    private final Vec3[] attachPoints = new Vec3[] {
        Vec3.createVectorHelper(.5, getMargin(), .5),
        Vec3.createVectorHelper(.5, 1 - getMargin(), .5),
        Vec3.createVectorHelper(.5, .5, getMargin()),
        Vec3.createVectorHelper(.5, .5, 1 - getMargin()),
        Vec3.createVectorHelper(getMargin(), .5, .5),
        Vec3.createVectorHelper(1 - getMargin(), .5, .5),
    };

    /** A chain may attach to a face only when this post is NOT connected on that face. */
    @Override
    public Vec3 getChainAttachPoint (IBlockAccess blockAccess, int x, int y, int z, int side) {
        int connectFlags = calcConnectionFlags(blockAccess, x, y, z);
        switch (side) {
            case 0:
                return (connectFlags & 1) == 0 ? attachPoints[0] : null;
            case 1:
                return (connectFlags & 2) == 0 ? attachPoints[1] : null;
            case 2:
                return (connectFlags & 4) == 0 ? attachPoints[2] : null;
            case 3:
                return (connectFlags & 8) == 0 ? attachPoints[3] : null;
            case 4:
                return (connectFlags & 16) == 0 ? attachPoints[4] : null;
            case 5:
                return (connectFlags & 32) == 0 ? attachPoints[5] : null;
        }

        return null;
    }

    @Override
    public void registerBlockIcons (IIconRegister register) {
        // Intentionally empty: icons are proxied from the underlying wood block.
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BitsFilteredDocIdSet;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocValuesDocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilterCachingPolicy;
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.HashedBytesRef;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.*;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Map;

import static com.google.common.collect.Maps.newHashMap;

/**
 * Parses the {@code script} filter DSL element into a Lucene {@link Filter}
 * that matches a document when the configured script evaluates to a truthy
 * value for it.
 */
public class ScriptFilterParser implements FilterParser {

    // DSL name this parser is registered under.
    public static final String NAME = "script";

    @Inject
    public ScriptFilterParser() {
    }

    /** Returns the DSL names handled by this parser (just {@code script}). */
    @Override
    public String[] names() {
        return new String[]{NAME};
    }

    /**
     * Consumes the current object from the parse context's token stream and
     * builds the filter. Recognized fields: {@code params} (object passed to
     * the script), {@code _name}, {@code _cache}, {@code _cache_key} /
     * {@code _cacheKey}; everything else is offered to the
     * {@link ScriptParameterParser}, which supplies the script source, its
     * type and its language.
     *
     * @throws QueryParsingException on an unsupported field or when no script
     *                               was provided
     */
    @Override
    public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();
        ScriptParameterParser scriptParameterParser = new ScriptParameterParser();

        XContentParser.Token token;

        FilterCachingPolicy cache = parseContext.autoFilterCachePolicy();
        HashedBytesRef cacheKey = null;
        // also, when caching, since its isCacheable is false, will result in loading all bit set...
        String script = null;
        String scriptLang;
        Map<String, Object> params = null;

        String filterName = null;
        String currentFieldName = null;
        ScriptService.ScriptType scriptType = null;

        // Walk field-name/value token pairs until the enclosing object closes.
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("params".equals(currentFieldName)) {
                    params = parser.map();
                } else {
                    throw new QueryParsingException(parseContext.index(), "[script] filter does not support [" + currentFieldName + "]");
                }
            } else if (token.isValue()) {
                if ("_name".equals(currentFieldName)) {
                    filterName = parser.text();
                } else if ("_cache".equals(currentFieldName)) {
                    cache = parseContext.parseFilterCachePolicy();
                } else if ("_cache_key".equals(currentFieldName) || "_cacheKey".equals(currentFieldName)) {
                    cacheKey = new HashedBytesRef(parser.text());
                } else if (!scriptParameterParser.token(currentFieldName, token, parser)){
                    // Not a script-related field either: reject it.
                    throw new QueryParsingException(parseContext.index(), "[script] filter does not support [" + currentFieldName + "]");
                }
            }
        }

        // Resolve the script source/type collected by the parameter parser.
        ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
        if (scriptValue != null) {
            script = scriptValue.script();
            scriptType = scriptValue.scriptType();
        }
        scriptLang = scriptParameterParser.lang();

        if (script == null) {
            throw new QueryParsingException(parseContext.index(), "script must be provided with a [script] filter");
        }
        if (params == null) {
            params = newHashMap();
        }

        Filter filter = new ScriptFilter(scriptLang, script, scriptType, params, parseContext.scriptService(), parseContext.lookup());
        if (cache != null) {
            filter = parseContext.cacheFilter(filter, cacheKey, cache);
        }
        if (filterName != null) {
            parseContext.addNamedFilter(filterName, filter);
        }
        return filter;
    }

    /**
     * Lucene {@link Filter} that accepts a document when the compiled search
     * script returns a truthy value for it (see
     * {@link ScriptDocSet#matchDoc(int)} for the truthiness rules).
     */
    public static class ScriptFilter extends Filter {

        private final String script;
        private final Map<String, Object> params;

        // Script compiled once at construction; per-segment leaf scripts are
        // derived from it in getDocIdSet.
        private final SearchScript searchScript;

        public ScriptFilter(String scriptLang, String script, ScriptService.ScriptType scriptType, Map<String, Object> params, ScriptService scriptService, SearchLookup searchLookup) {
            this.script = script;
            this.params = params;
            // params is defensively copied so later mutation of the caller's
            // map does not affect the compiled script.
            this.searchScript = scriptService.search(searchLookup, scriptLang, script, scriptType, ScriptContext.Standard.SEARCH, newHashMap(params));
        }

        @Override
        public String toString(String field) {
            StringBuilder buffer = new StringBuilder();
            buffer.append("ScriptFilter(");
            buffer.append(script);
            buffer.append(")");
            return buffer.toString();
        }

        // NOTE(review): equality is based on script source and params only —
        // two filters differing only in script language or type compare
        // equal. Presumably acceptable for filter-cache keying; confirm.
        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;

            ScriptFilter that = (ScriptFilter) o;

            if (params != null ? !params.equals(that.params) : that.params != null) return false;
            if (script != null ? !script.equals(that.script) : that.script != null) return false;

            return true;
        }

        // Consistent with equals: hashes script source and params only.
        @Override
        public int hashCode() {
            int result = script != null ? script.hashCode() : 0;
            result = 31 * result + (params != null ? params.hashCode() : 0);
            return result;
        }

        @Override
        public DocIdSet getDocIdSet(LeafReaderContext context, Bits acceptDocs) throws IOException {
            final LeafSearchScript leafScript = searchScript.getLeafSearchScript(context);
            // LUCENE 4 UPGRADE: we can simply wrap this here since it is not cacheable and if we are not top level we will get a null passed anyway
            return BitsFilteredDocIdSet.wrap(new ScriptDocSet(context.reader().maxDoc(), acceptDocs, leafScript), acceptDocs);
        }

        /**
         * Per-segment doc set that evaluates the leaf script lazily for each
         * candidate document.
         */
        static class ScriptDocSet extends DocValuesDocIdSet {

            private final LeafSearchScript searchScript;

            public ScriptDocSet(int maxDoc, @Nullable Bits acceptDocs, LeafSearchScript searchScript) {
                super(maxDoc, acceptDocs);
                this.searchScript = searchScript;
            }

            /**
             * Runs the script for {@code doc}. Null is a non-match, a Boolean
             * is used directly, any Number matches when non-zero; every other
             * return type is rejected with an exception.
             */
            @Override
            protected boolean matchDoc(int doc) {
                searchScript.setDocument(doc);
                Object val = searchScript.run();
                if (val == null) {
                    return false;
                }
                if (val instanceof Boolean) {
                    return (Boolean) val;
                }
                if (val instanceof Number) {
                    return ((Number) val).longValue() != 0;
                }
                throw new ElasticsearchIllegalArgumentException("Can't handle type [" + val + "] in script filter");
            }

            // Reports no heap usage: the set holds only the leaf script,
            // whose footprint is not accounted for here.
            @Override
            public long ramBytesUsed() {
                return 0;
            }
        }
    }
}