gt
stringclasses
1 value
context
stringlengths
2.05k
161k
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.concurrency;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.impl.CoreProgressManager;
import com.intellij.openapi.progress.util.StandardProgressIndicatorBase;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import consulo.logging.Logger;
import jakarta.inject.Singleton;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * {@link JobLauncher} implementation backed by {@link ForkJoinPool#commonPool()}.
 * <p>
 * Runs batches of items concurrently under a (possibly caller-supplied) {@link ProgressIndicator},
 * honoring cancellation, optional read-action wrapping, and fail-fast semantics.
 *
 * @author cdr
 */
@Singleton
public class JobLauncherImpl extends JobLauncher {
  private static final Logger LOG = Logger.getInstance(JobLauncherImpl.class);

  // Minimum pool parallelism required before work is forked to the common pool;
  // at or below this threshold items are processed inline on the caller's thread.
  static final int CORES_FORK_THRESHOLD = 1;

  /**
   * Processes {@code things} with {@code thingProcessor}, concurrently when the pool is large enough,
   * otherwise inline on the calling thread.
   *
   * @param things                      items to process
   * @param progress                    indicator to report cancellation through; may be null (a private one is created)
   * @param runInReadAction             if true, processing happens under a read action
   * @param failFastOnAcquireReadAction if true, abort (by exception) when a processor cannot acquire the read lock
   * @param thingProcessor              returns false to abort the whole batch
   * @return true if every item was processed and every processor returned true; false otherwise
   * @throws ProcessCanceledException if {@code progress} was canceled externally
   */
  @Override
  public <T> boolean invokeConcurrentlyUnderProgress(@Nonnull final List<? extends T> things,
                                                     ProgressIndicator progress,
                                                     boolean runInReadAction,
                                                     boolean failFastOnAcquireReadAction,
                                                     @Nonnull final Processor<? super T> thingProcessor) throws ProcessCanceledException {
    // supply our own indicator even if we haven't given one - to support cancellation
    // use StandardProgressIndicator by default to avoid assertion in SensitiveProgressWrapper() ctr later
    final ProgressIndicator wrapper = progress == null ? new StandardProgressIndicatorBase() : new SensitiveProgressWrapper(progress);

    // Small batches (or write-action contexts) are handled synchronously; non-null means "already done".
    Boolean result = processImmediatelyIfTooFew(things, wrapper, runInReadAction, thingProcessor);
    if (result != null) return result.booleanValue();

    ProgressManager pm = ProgressManager.getInstance();
    // If the calling thread is prioritized, propagate that priority into each item's processing.
    Processor<? super T> processor =
            ((CoreProgressManager)pm).isPrioritizedThread(Thread.currentThread()) ? t -> pm.computePrioritized(() -> thingProcessor.process(t)) : thingProcessor;

    List<ApplierCompleter<T>> failedSubTasks = Collections.synchronizedList(new ArrayList<>());
    // ApplierCompleter is a project-local fork/join task that recursively splits [0, things.size())
    // and collects sub-tasks that failed to acquire the read lock into failedSubTasks.
    ApplierCompleter<T> applier =
            new ApplierCompleter<>(null, runInReadAction, failFastOnAcquireReadAction, wrapper, things, processor, 0, things.size(), failedSubTasks, null);
    try {
      ForkJoinPool.commonPool().execute(applier);
      // call checkCanceled a bit more often than .invoke()
      // Poll with a short timeout instead of joining outright so external cancellation is noticed quickly.
      while (!applier.isDone()) {
        ProgressManager.checkCanceled();
        // does automatic compensation against starvation (in ForkJoinPool.awaitJoin)
        try {
          applier.get(10, TimeUnit.MILLISECONDS);
        }
        catch (TimeoutException ignored) {
          // expected on each poll while the task is still running; loop and re-check cancellation
        }
        catch (ExecutionException e) {
          // unwrap: the cause is dispatched by type in the catch chain below
          throw e.getCause();
        }
      }
      if (applier.throwable != null) {
        throw applier.throwable;
      }
    }
    catch (ApplierCompleter.ComputationAbortedException e) {
      // one of the processors returned false
      return false;
    }
    catch (ApplicationUtil.CannotRunReadActionException e) {
      // failFastOnAcquireReadAction==true and one of the processors called runReadAction() during the pending write action
      throw e;
    }
    catch (ProcessCanceledException e) {
      LOG.debug(e);
      // task1.processor returns false and the task cancels the indicator
      // then task2 calls checkCancel() and get here
      return false;
    }
    catch (RuntimeException | Error e) {
      throw e;
    }
    catch (Throwable e) {
      // checked throwable from getCause() above: wrap, since this method declares none
      throw new RuntimeException(e);
    }
    //assert applier.isDone();
    // Retry (sequentially) the sub-tasks that could not get the read lock during the parallel run.
    return applier.completeTaskWhichFailToAcquireReadAction();
  }

  // if {@code things} are too few to be processed in the real pool, returns TRUE if processed successfully, FALSE if not
  // returns null if things need to be processed in the real pool
  private static <T> Boolean processImmediatelyIfTooFew(@Nonnull final List<? extends T> things,
                                                        @Nonnull final ProgressIndicator progress,
                                                        boolean runInReadAction,
                                                        @Nonnull final Processor<? super T> thingProcessor) {
    // commit can be invoked from within write action
    //if (runInReadAction && ApplicationManager.getApplication().isWriteAccessAllowed()) {
    //  throw new RuntimeException("Must not run invokeConcurrentlyUnderProgress() from under write action because of imminent deadlock");
    //}

    if (things.isEmpty()) return true;

    // Process inline when: a single item, the pool is too small to help,
    // or we'd need read actions while a write action is already pending (forking would deadlock).
    if (things.size() <= 1 ||
        JobSchedulerImpl.getJobPoolParallelism() <= CORES_FORK_THRESHOLD ||
        runInReadAction && ApplicationManager.getApplication().isWriteAccessAllowed()) {
      final AtomicBoolean result = new AtomicBoolean(true);
      Runnable runnable = () -> ProgressManager.getInstance().executeProcessUnderProgress(() -> {
        //noinspection ForLoopReplaceableByForEach
        for (int i = 0; i < things.size(); i++) {
          T thing = things.get(i);
          if (!thingProcessor.process(thing)) {
            // a processor vetoed: stop the batch and report failure
            result.set(false);
            break;
          }
        }
      }, progress);
      if (runInReadAction) {
        ApplicationManager.getApplication().runReadAction(runnable);
      }
      else {
        runnable.run();
      }
      return result.get();
    }
    return null;
  }

  // This implementation is not really async
  // (the returned Job wraps a ForkJoinTask that may start executing immediately)
  @Nonnull
  @Override
  public Job<Void> submitToJobThread(@Nonnull final Runnable action, @Nullable Consumer<? super Future<?>> onDoneCallback) {
    VoidForkJoinTask task = new VoidForkJoinTask(action, onDoneCallback);
    task.submit();
    return task;
  }

  /**
   * Adapts a {@link Runnable} to the project-local {@link Job} interface via a result-less
   * {@link ForkJoinTask} submitted to the common pool.
   */
  private static class VoidForkJoinTask implements Job<Void> {
    private final Runnable myAction;
    private final Consumer<? super Future<?>> myOnDoneCallback;

    private enum Status { STARTED, EXECUTED } // null=not yet executed, STARTED=started execution, EXECUTED=finished

    // Written only by exec(); read by isDone() possibly from other threads, hence volatile.
    private volatile Status myStatus;

    private final ForkJoinTask<Void> myForkJoinTask = new ForkJoinTask<Void>() {
      @Override
      public Void getRawResult() {
        return null;
      }

      @Override
      protected void setRawResult(Void value) {
      }

      @Override
      protected boolean exec() {
        myStatus = Status.STARTED;
        try {
          myAction.run();
          complete(null); // complete manually before calling callback
        }
        catch (Throwable throwable) {
          completeExceptionally(throwable);
        }
        finally {
          // EXECUTED must be set before the callback so isDone() is true inside the callback
          myStatus = Status.EXECUTED;
          if (myOnDoneCallback != null) {
            myOnDoneCallback.consume(this);
          }
        }
        return true;
      }
    };

    private VoidForkJoinTask(@Nonnull Runnable action, @Nullable Consumer<? super Future<?>> onDoneCallback) {
      myAction = action;
      myOnDoneCallback = onDoneCallback;
    }

    private void submit() {
      ForkJoinPool.commonPool().execute(myForkJoinTask);
    }

    //////////////// Job

    // when canceled in the middle of the execution returns false until finished
    @Override
    public boolean isDone() {
      boolean wasCancelled = myForkJoinTask.isCancelled(); // must be before status check
      Status status = myStatus;
      // done == finished normally, or canceled before it ever started (status still null)
      return status == Status.EXECUTED || status == null && wasCancelled;
    }

    @Override
    public boolean isCanceled() {
      return myForkJoinTask.isCancelled();
    }

    @Override
    public void cancel() {
      myForkJoinTask.cancel(true);
    }

    // waits for the job to finish execution (when called on a canceled job in the middle of the execution, wait for finish)
    @Override
    public void waitForCompletion(int millis) throws InterruptedException, ExecutionException, TimeoutException {
      while (!isDone()) {
        try {
          myForkJoinTask.get(millis, TimeUnit.MILLISECONDS);
          break;
        }
        catch (CancellationException e) {
          // was canceled in the middle of execution
          // can't do anything but wait. help other tasks in the meantime
          if (!isDone()) {
            ForkJoinPool.commonPool().awaitQuiescence(millis, TimeUnit.MILLISECONDS);
            if (!isDone()) throw new TimeoutException();
          }
        }
      }
    }
  }

  /**
   * Process all elements from the {@code failedToProcess} and then {@code things} concurrently in the underlying pool.
   * Processing happens concurrently maintaining {@code JobSchedulerImpl.CORES_COUNT} parallelism.
   * Stop when {@code tombStone} element is occurred.
   * If was unable to process some element, add it back to the {@code failedToProcess} queue.
   *
   * @return true if all elements processed successfully, false if at least one processor returned false or exception occurred
   */
  public <T> boolean processQueue(@Nonnull final BlockingQueue<T> things,
                                  @Nonnull final Queue<T> failedToProcess,
                                  @Nonnull final ProgressIndicator progress,
                                  @Nonnull final T tombStone,
                                  @Nonnull final Processor<? super T> thingProcessor) {
    // One worker: drains retries first, then blocks on the main queue until the tombstone arrives.
    class MyTask implements Callable<Boolean> {
      private final int mySeq; // worker ordinal, for diagnostics only (see toString)
      private boolean result;

      private MyTask(int seq) {
        mySeq = seq;
      }

      @Override
      public Boolean call() {
        ProgressManager.getInstance().executeProcessUnderProgress(() -> {
          try {
            while (true) {
              ProgressManager.checkCanceled();
              T element = failedToProcess.poll();
              if (element == null) element = things.take();
              if (element == tombStone) {
                // put the tombstone back so the other workers also see it and stop
                things.offer(element);
                result = true;
                break;
              }
              try {
                if (!thingProcessor.process(element)) {
                  result = false;
                  break;
                }
              }
              catch (RuntimeException e) {
                // re-queue the failed element for a later retry, then propagate
                failedToProcess.add(element);
                throw e;
              }
            }
          }
          catch (InterruptedException e) {
            throw new RuntimeException(e);
          }
        }, progress);
        return result;
      }

      @Override
      public String toString() {
        return super.toString() + " seq=" + mySeq;
      }
    }

    progress.checkCanceled(); // do not start up expensive threads if there's no need to

    // Tombstone already enqueued => the queue is complete and (presumably) small — TODO confirm this
    // heuristic: contains() is O(n) and the queue could still be large.
    boolean isSmallEnough = things.contains(tombStone);
    if (isSmallEnough) {
      try {
        // do not distribute for small queues
        return new MyTask(0).call();
      }
      catch (RuntimeException e) {
        throw e;
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    // Spawn (parallelism - 1) workers: the caller's thread capacity is effectively consumed by join() below.
    List<ForkJoinTask<Boolean>> tasks = new ArrayList<>();
    for (int i = 0; i < Math.max(1, JobSchedulerImpl.getJobPoolParallelism() - 1); i++) {
      tasks.add(ForkJoinPool.commonPool().submit(new MyTask(i)));
    }

    boolean result = true;
    RuntimeException exception = null;
    for (ForkJoinTask<Boolean> task : tasks) {
      try {
        result &= task.join();
      }
      catch (RuntimeException e) {
        // remember the (last) failure but keep joining so every worker terminates first
        exception = e;
      }
    }
    if (exception != null) {
      throw exception;
    }
    return result;
  }
}
/** * Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior * University * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. **/ package org.openflow.protocol; import java.io.Serializable; import java.nio.ByteBuffer; import java.util.Arrays; import net.floodlightcontroller.packet.Ethernet; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import org.jboss.netty.buffer.ChannelBuffer; import org.openflow.protocol.serializers.OFMatchJSONSerializer; import org.openflow.util.HexString; import org.openflow.util.U16; import org.openflow.util.U8; /** * Represents an ofp_match structure * * @author David Erickson (daviderickson@cs.stanford.edu) * @author Rob Sherwood (rob.sherwood@stanford.edu) */ @JsonSerialize(using = OFMatchJSONSerializer.class) public class OFMatch implements Cloneable, Serializable { /** * */ private static final long serialVersionUID = 1L; public static int MINIMUM_LENGTH = 40; final public static int OFPFW_ALL = ((1 << 22) - 1); final public static int OFPFW_IN_PORT = 1 << 0; /* Switch input port. */ final public static int OFPFW_DL_VLAN = 1 << 1; /* VLAN id. */ final public static int OFPFW_DL_SRC = 1 << 2; /* Ethernet source address. */ final public static int OFPFW_DL_DST = 1 << 3; /* * Ethernet destination * address. */ final public static int OFPFW_DL_TYPE = 1 << 4; /* Ethernet frame type. */ final public static int OFPFW_NW_PROTO = 1 << 5; /* IP protocol. */ final public static int OFPFW_TP_SRC = 1 << 6; /* TCP/UDP source port. 
*/ final public static int OFPFW_TP_DST = 1 << 7; /* TCP/UDP destination port. */ /* * IP source address wildcard bit count. 0 is exact match, 1 ignores the * LSB, 2 ignores the 2 least-significant bits, ..., 32 and higher wildcard * the entire field. This is the *opposite* of the usual convention where * e.g. /24 indicates that 8 bits (not 24 bits) are wildcarded. */ final public static int OFPFW_NW_SRC_SHIFT = 8; final public static int OFPFW_NW_SRC_BITS = 6; final public static int OFPFW_NW_SRC_MASK = ((1 << OFPFW_NW_SRC_BITS) - 1) << OFPFW_NW_SRC_SHIFT; final public static int OFPFW_NW_SRC_ALL = 32 << OFPFW_NW_SRC_SHIFT; /* IP destination address wildcard bit count. Same format as source. */ final public static int OFPFW_NW_DST_SHIFT = 14; final public static int OFPFW_NW_DST_BITS = 6; final public static int OFPFW_NW_DST_MASK = ((1 << OFPFW_NW_DST_BITS) - 1) << OFPFW_NW_DST_SHIFT; final public static int OFPFW_NW_DST_ALL = 32 << OFPFW_NW_DST_SHIFT; final public static int OFPFW_DL_VLAN_PCP = 1 << 20; /* VLAN priority. */ final public static int OFPFW_NW_TOS = 1 << 21; /* * IP ToS (DSCP field, 6 * bits). 
*/ final public static int OFPFW_ALL_SANITIZED = (((1 << 22) - 1) & ~OFPFW_NW_SRC_MASK & ~OFPFW_NW_DST_MASK) | OFPFW_NW_SRC_ALL | OFPFW_NW_DST_ALL; /* List of Strings for marshalling and unmarshalling to human readable forms */ final public static String STR_IN_PORT = "in_port"; final public static String STR_DL_DST = "dl_dst"; final public static String STR_DL_SRC = "dl_src"; final public static String STR_DL_TYPE = "dl_type"; final public static String STR_DL_VLAN = "dl_vlan"; final public static String STR_DL_VLAN_PCP = "dl_vlan_pcp"; final public static String STR_NW_DST = "nw_dst"; final public static String STR_NW_SRC = "nw_src"; final public static String STR_NW_PROTO = "nw_proto"; final public static String STR_NW_TOS = "nw_tos"; final public static String STR_TP_DST = "tp_dst"; final public static String STR_TP_SRC = "tp_src"; protected int wildcards; protected short inputPort; protected byte[] dataLayerSource; protected byte[] dataLayerDestination; protected short dataLayerVirtualLan; protected byte dataLayerVirtualLanPriorityCodePoint; protected short dataLayerType; protected byte networkTypeOfService; protected byte networkProtocol; protected int networkSource; protected int networkDestination; protected short transportSource; protected short transportDestination; /** * By default, create a OFMatch that matches everything (mostly because it's * the least amount of work to make a valid OFMatch) */ public OFMatch() { this.wildcards = OFPFW_ALL; this.dataLayerDestination = new byte[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 }; this.dataLayerSource = new byte[] { 0x0, 0x0, 0x0, 0x0, 0x0, 0x0 }; this.dataLayerVirtualLan = Ethernet.VLAN_UNTAGGED; this.dataLayerVirtualLanPriorityCodePoint = 0; this.dataLayerType = 0; this.inputPort = 0; this.networkProtocol = 0; this.networkTypeOfService = 0; this.networkSource = 0; this.networkDestination = 0; this.transportDestination = 0; this.transportSource = 0; } /** * Get dl_dst * * @return an arrays of bytes */ public byte[] 
getDataLayerDestination() { return this.dataLayerDestination; } /** * Set dl_dst * * @param dataLayerDestination */ public OFMatch setDataLayerDestination(byte[] dataLayerDestination) { this.dataLayerDestination = dataLayerDestination; return this; } /** * Set dl_dst, but first translate to byte[] using HexString * * @param mac * A colon separated string of 6 pairs of octets, e..g., * "00:17:42:EF:CD:8D" */ public OFMatch setDataLayerDestination(String mac) { byte bytes[] = HexString.fromHexString(mac); if (bytes.length != 6) throw new IllegalArgumentException( "expected string with 6 octets, got '" + mac + "'"); this.dataLayerDestination = bytes; return this; } /** * Get dl_src * * @return an array of bytes */ public byte[] getDataLayerSource() { return this.dataLayerSource; } /** * Set dl_src * * @param dataLayerSource */ public OFMatch setDataLayerSource(byte[] dataLayerSource) { this.dataLayerSource = dataLayerSource; return this; } /** * Set dl_src, but first translate to byte[] using HexString * * @param mac * A colon separated string of 6 pairs of octets, e..g., * "00:17:42:EF:CD:8D" */ public OFMatch setDataLayerSource(String mac) { byte bytes[] = HexString.fromHexString(mac); if (bytes.length != 6) throw new IllegalArgumentException( "expected string with 6 octets, got '" + mac + "'"); this.dataLayerSource = bytes; return this; } /** * Get dl_type * * @return ether_type */ public short getDataLayerType() { return this.dataLayerType; } /** * Set dl_type * * @param dataLayerType */ public OFMatch setDataLayerType(short dataLayerType) { this.dataLayerType = dataLayerType; return this; } /** * Get dl_vlan * * @return vlan tag; VLAN_NONE == no tag */ public short getDataLayerVirtualLan() { return this.dataLayerVirtualLan; } /** * Set dl_vlan * * @param dataLayerVirtualLan */ public OFMatch setDataLayerVirtualLan(short dataLayerVirtualLan) { this.dataLayerVirtualLan = dataLayerVirtualLan; return this; } /** * Get dl_vlan_pcp * * @return */ public byte 
getDataLayerVirtualLanPriorityCodePoint() { return this.dataLayerVirtualLanPriorityCodePoint; } /** * Set dl_vlan_pcp * * @param pcp */ public OFMatch setDataLayerVirtualLanPriorityCodePoint(byte pcp) { this.dataLayerVirtualLanPriorityCodePoint = pcp; return this; } /** * Get in_port * * @return */ public short getInputPort() { return this.inputPort; } /** * Set in_port * * @param inputPort */ public OFMatch setInputPort(short inputPort) { this.inputPort = inputPort; return this; } /** * Get nw_dst * * @return */ public int getNetworkDestination() { return this.networkDestination; } /** * Set nw_dst * * @param networkDestination */ public OFMatch setNetworkDestination(int networkDestination) { this.networkDestination = networkDestination; return this; } /** * Parse this match's wildcard fields and return the number of significant * bits in the IP destination field. NOTE: this returns the number of bits * that are fixed, i.e., like CIDR, not the number of bits that are free * like OpenFlow encodes. * * @return a number between 0 (matches all IPs) and 63 ( 32>= implies exact * match) */ public int getNetworkDestinationMaskLen() { return Math.max(32 - ((wildcards & OFPFW_NW_DST_MASK) >> OFPFW_NW_DST_SHIFT), 0); } /** * Parse this match's wildcard fields and return the number of significant * bits in the IP destination field. NOTE: this returns the number of bits * that are fixed, i.e., like CIDR, not the number of bits that are free * like OpenFlow encodes. 
* * @return a number between 0 (matches all IPs) and 32 (exact match) */ public int getNetworkSourceMaskLen() { return Math.max(32 - ((wildcards & OFPFW_NW_SRC_MASK) >> OFPFW_NW_SRC_SHIFT), 0); } /** * Get nw_proto * * @return */ public byte getNetworkProtocol() { return this.networkProtocol; } /** * Set nw_proto * * @param networkProtocol */ public OFMatch setNetworkProtocol(byte networkProtocol) { this.networkProtocol = networkProtocol; return this; } /** * Get nw_src * * @return */ public int getNetworkSource() { return this.networkSource; } /** * Set nw_src * * @param networkSource */ public OFMatch setNetworkSource(int networkSource) { this.networkSource = networkSource; return this; } /** * Get nw_tos OFMatch stores the ToS bits as top 6-bits, so right shift by 2 * bits before returning the value * * @return : 6-bit DSCP value (0-63) */ public byte getNetworkTypeOfService() { return (byte) ((this.networkTypeOfService >> 2) & 0x3f); } /** * Set nw_tos OFMatch stores the ToS bits as top 6-bits, so left shift by 2 * bits before storing the value * * @param networkTypeOfService * : 6-bit DSCP value (0-63) */ public OFMatch setNetworkTypeOfService(byte networkTypeOfService) { this.networkTypeOfService = (byte) (networkTypeOfService << 2); return this; } /** * Get tp_dst * * @return */ public short getTransportDestination() { return this.transportDestination; } /** * Set tp_dst * * @param transportDestination */ public OFMatch setTransportDestination(short transportDestination) { this.transportDestination = transportDestination; return this; } /** * Get tp_src * * @return */ public short getTransportSource() { return this.transportSource; } /** * Set tp_src * * @param transportSource */ public OFMatch setTransportSource(short transportSource) { this.transportSource = transportSource; return this; } /** * Get wildcards * * @return */ public int getWildcards() { return this.wildcards; } /** * Get wildcards * * @return */ public Wildcards getWildcardObj() { return 
Wildcards.of(wildcards); } /** * Set wildcards * * @param wildcards */ public OFMatch setWildcards(int wildcards) { this.wildcards = wildcards; return this; } /** set the wildcard using the Wildcards convenience object */ public OFMatch setWildcards(Wildcards wildcards) { this.wildcards = wildcards.getInt(); return this; } /** * Initializes this OFMatch structure with the corresponding data from the * specified packet. Must specify the input port, to ensure that * this.in_port is set correctly. Specify OFPort.NONE or OFPort.ANY if input * port not applicable or available * * @param packetData * The packet's data * @param inputPort * the port the packet arrived on */ public OFMatch loadFromPacket(byte[] packetData, short inputPort) { short scratch; int transportOffset = 34; ByteBuffer packetDataBB = ByteBuffer.wrap(packetData); int limit = packetDataBB.limit(); this.wildcards = 0; // all fields have explicit entries this.inputPort = inputPort; if (inputPort == OFPort.OFPP_ALL.getValue()) this.wildcards |= OFPFW_IN_PORT; assert (limit >= 14); // dl dst this.dataLayerDestination = new byte[6]; packetDataBB.get(this.dataLayerDestination); // dl src this.dataLayerSource = new byte[6]; packetDataBB.get(this.dataLayerSource); // dl type this.dataLayerType = packetDataBB.getShort(); if (getDataLayerType() != (short) 0x8100) { // need cast to avoid signed // bug setDataLayerVirtualLan((short) 0xffff); setDataLayerVirtualLanPriorityCodePoint((byte) 0); } else { // has vlan tag scratch = packetDataBB.getShort(); setDataLayerVirtualLan((short) (0xfff & scratch)); setDataLayerVirtualLanPriorityCodePoint((byte) ((0xe000 & scratch) >> 13)); this.dataLayerType = packetDataBB.getShort(); } switch (getDataLayerType()) { case 0x0800: // ipv4 // check packet length scratch = packetDataBB.get(); scratch = (short) (0xf & scratch); transportOffset = (packetDataBB.position() - 1) + (scratch * 4); // nw tos (dscp) scratch = packetDataBB.get(); setNetworkTypeOfService((byte) ((0xfc & 
scratch) >> 2)); // nw protocol packetDataBB.position(packetDataBB.position() + 7); this.networkProtocol = packetDataBB.get(); // nw src packetDataBB.position(packetDataBB.position() + 2); this.networkSource = packetDataBB.getInt(); // nw dst this.networkDestination = packetDataBB.getInt(); packetDataBB.position(transportOffset); break; case 0x0806: // arp int arpPos = packetDataBB.position(); // opcode scratch = packetDataBB.getShort(arpPos + 6); setNetworkProtocol((byte) (0xff & scratch)); scratch = packetDataBB.getShort(arpPos + 2); // if ipv4 and addr len is 4 if (scratch == 0x800 && packetDataBB.get(arpPos + 5) == 4) { // nw src this.networkSource = packetDataBB.getInt(arpPos + 14); // nw dst this.networkDestination = packetDataBB.getInt(arpPos + 24); } else { setNetworkSource(0); setNetworkDestination(0); } break; default: // Not ARP or IP. Wildcard NW_DST and NW_SRC this.wildcards |= OFPFW_NW_DST_ALL | OFPFW_NW_SRC_ALL | OFPFW_NW_PROTO | OFPFW_NW_TOS; setNetworkTypeOfService((byte) 0); setNetworkProtocol((byte) 0); setNetworkSource(0); setNetworkDestination(0); break; } switch (getNetworkProtocol()) { case 0x01: // icmp // type this.transportSource = U8.f(packetDataBB.get()); // code this.transportDestination = U8.f(packetDataBB.get()); break; case 0x06: // tcp // tcp src this.transportSource = packetDataBB.getShort(); // tcp dest this.transportDestination = packetDataBB.getShort(); break; case 0x11: // udp // udp src this.transportSource = packetDataBB.getShort(); // udp dest this.transportDestination = packetDataBB.getShort(); break; default: // Unknown network proto. 
this.wildcards |= OFPFW_TP_DST | OFPFW_TP_SRC; setTransportDestination((short) 0); setTransportSource((short) 0); break; } return this; } /** * Read this message off the wire from the specified ByteBuffer * * @param data */ public void readFrom(ChannelBuffer data) { this.wildcards = data.readInt(); this.inputPort = data.readShort(); this.dataLayerSource = new byte[6]; data.readBytes(this.dataLayerSource); this.dataLayerDestination = new byte[6]; data.readBytes(this.dataLayerDestination); this.dataLayerVirtualLan = data.readShort(); this.dataLayerVirtualLanPriorityCodePoint = data.readByte(); data.readByte(); // pad this.dataLayerType = data.readShort(); this.networkTypeOfService = data.readByte(); this.networkProtocol = data.readByte(); data.readByte(); // pad data.readByte(); // pad this.networkSource = data.readInt(); this.networkDestination = data.readInt(); this.transportSource = data.readShort(); this.transportDestination = data.readShort(); } /** * Write this message's binary format to the specified ByteBuffer * * @param data */ public void writeTo(ChannelBuffer data) { data.writeInt(wildcards); data.writeShort(inputPort); data.writeBytes(this.dataLayerSource); data.writeBytes(this.dataLayerDestination); data.writeShort(dataLayerVirtualLan); data.writeByte(dataLayerVirtualLanPriorityCodePoint); data.writeByte((byte) 0x0); // pad data.writeShort(dataLayerType); data.writeByte(networkTypeOfService); data.writeByte(networkProtocol); data.writeByte((byte) 0x0); // pad data.writeByte((byte) 0x0); // pad data.writeInt(networkSource); data.writeInt(networkDestination); data.writeShort(transportSource); data.writeShort(transportDestination); } @Override public int hashCode() { final int prime = 131; int result = 1; result = prime * result + Arrays.hashCode(dataLayerDestination); result = prime * result + Arrays.hashCode(dataLayerSource); result = prime * result + dataLayerType; result = prime * result + dataLayerVirtualLan; result = prime * result + 
dataLayerVirtualLanPriorityCodePoint; result = prime * result + inputPort; result = prime * result + networkDestination; result = prime * result + networkProtocol; result = prime * result + networkSource; result = prime * result + networkTypeOfService; result = prime * result + transportDestination; result = prime * result + transportSource; result = prime * result + wildcards; return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (!(obj instanceof OFMatch)) { return false; } OFMatch other = (OFMatch) obj; if (!Arrays.equals(dataLayerDestination, other.dataLayerDestination)) { return false; } if (!Arrays.equals(dataLayerSource, other.dataLayerSource)) { return false; } if (dataLayerType != other.dataLayerType) { return false; } if (dataLayerVirtualLan != other.dataLayerVirtualLan) { return false; } if (dataLayerVirtualLanPriorityCodePoint != other.dataLayerVirtualLanPriorityCodePoint) { return false; } if (inputPort != other.inputPort) { return false; } if (networkDestination != other.networkDestination) { return false; } if (networkProtocol != other.networkProtocol) { return false; } if (networkSource != other.networkSource) { return false; } if (networkTypeOfService != other.networkTypeOfService) { return false; } if (transportDestination != other.transportDestination) { return false; } if (transportSource != other.transportSource) { return false; } if ((wildcards & OFMatch.OFPFW_ALL) != (other.wildcards & OFPFW_ALL)) { // only // consider // allocated // part // of // wildcards return false; } return true; } /** * Implement clonable interface */ @Override public OFMatch clone() { try { OFMatch ret = (OFMatch) super.clone(); ret.dataLayerDestination = this.dataLayerDestination.clone(); ret.dataLayerSource = this.dataLayerSource.clone(); return ret; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); } } /** * matching two OFMatch * @param toCompare * @return */ 
public boolean match(OFMatch toCompare) { if ((wildcards & OFPFW_IN_PORT) == 0 && this.inputPort != toCompare.getInputPort()) return false; if ((wildcards & OFPFW_DL_DST) == 0 && !Arrays.equals(this.dataLayerDestination, toCompare.getDataLayerDestination())) return false; if ((wildcards & OFPFW_DL_SRC) == 0 && !Arrays.equals(this.dataLayerSource, toCompare.getDataLayerSource())) return false; if ((wildcards & OFPFW_DL_TYPE) == 0 && this.dataLayerType != toCompare.getDataLayerType()) return false; if ((wildcards & OFPFW_DL_VLAN) == 0 && this.dataLayerVirtualLan != toCompare.getDataLayerVirtualLan()) return false; if ((wildcards & OFPFW_DL_VLAN_PCP) == 0 && this.dataLayerVirtualLanPriorityCodePoint != toCompare.getDataLayerVirtualLanPriorityCodePoint()) return false; if ((wildcards & OFPFW_NW_PROTO) == 0 && this.networkProtocol != toCompare.getNetworkProtocol()) return false; if ((wildcards & OFPFW_NW_TOS) == 0 && this.networkTypeOfService != toCompare.getNetworkTypeOfService()) return false; //compare network layer src/dst int dstmasklen = getNetworkDestinationMaskLen(); int srcmasklen = getNetworkSourceMaskLen(); if (dstmasklen >= 32 && networkDestination != toCompare.getNetworkDestination()) return false; if (srcmasklen >= 32 && networkSource != toCompare.getNetworkSource()) return false; int dstmask = ~((1 << (32 - dstmasklen)) - 1); int srcmask = ~((1 << (32 - srcmasklen)) - 1); if (dstmasklen < 32 && (networkDestination & dstmask) != (toCompare.getNetworkDestination() & dstmask)) return false; if (srcmasklen < 32 && (networkSource & srcmask) != (toCompare.getNetworkSource() & srcmask)) return false; //layer 4 if ((wildcards & OFPFW_TP_DST) == 0 && this.transportDestination != toCompare.getTransportDestination()) return false; if ((wildcards & OFPFW_TP_SRC) == 0 && this.transportSource != toCompare.getTransportSource()) return false; return true; } /** * Output a dpctl-styled string, i.e., only list the elements that are not * wildcarded A match-everything 
OFMatch outputs "OFMatch[]" * * @return * "OFMatch[dl_src:00:20:01:11:22:33,nw_src:192.168.0.0/24,tp_dst:80]" */ @Override public String toString() { String str = ""; // l1 if ((wildcards & OFPFW_IN_PORT) == 0) str += "," + STR_IN_PORT + "=" + U16.f(this.inputPort); // l2 if ((wildcards & OFPFW_DL_DST) == 0) str += "," + STR_DL_DST + "=" + HexString.toHexString(this.dataLayerDestination); if ((wildcards & OFPFW_DL_SRC) == 0) str += "," + STR_DL_SRC + "=" + HexString.toHexString(this.dataLayerSource); if ((wildcards & OFPFW_DL_TYPE) == 0) str += "," + STR_DL_TYPE + "=0x" + Integer.toHexString(U16.f(this.dataLayerType)); if ((wildcards & OFPFW_DL_VLAN) == 0) str += "," + STR_DL_VLAN + "=0x" + Integer.toHexString(U16.f(this.dataLayerVirtualLan)); if ((wildcards & OFPFW_DL_VLAN_PCP) == 0) str += "," + STR_DL_VLAN_PCP + "=" + Integer.toHexString(U8.f(this.dataLayerVirtualLanPriorityCodePoint)); // l3 if (getNetworkDestinationMaskLen() > 0) str += "," + STR_NW_DST + "=" + cidrToString(networkDestination, getNetworkDestinationMaskLen()); if (getNetworkSourceMaskLen() > 0) str += "," + STR_NW_SRC + "=" + cidrToString(networkSource, getNetworkSourceMaskLen()); if ((wildcards & OFPFW_NW_PROTO) == 0) str += "," + STR_NW_PROTO + "=" + this.networkProtocol; if ((wildcards & OFPFW_NW_TOS) == 0) str += "," + STR_NW_TOS + "=" + this.getNetworkTypeOfService(); // l4 if ((wildcards & OFPFW_TP_DST) == 0) str += "," + STR_TP_DST + "=" + this.transportDestination; if ((wildcards & OFPFW_TP_SRC) == 0) str += "," + STR_TP_SRC + "=" + this.transportSource; if ((str.length() > 0) && (str.charAt(0) == ',')) str = str.substring(1); // trim // the // leading // "," // done return "OFMatch[" + str + "]"; } /** * Return a string including all match fields, regardless whether they * are wildcarded or not. 
*/ public String toStringUnmasked() { String str = ""; // l1 str += STR_IN_PORT + "=" + U16.f(this.inputPort); // l2 str += "," + STR_DL_DST + "=" + HexString.toHexString(this.dataLayerDestination); str += "," + STR_DL_SRC + "=" + HexString.toHexString(this.dataLayerSource); str += "," + STR_DL_TYPE + "=0x" + Integer.toHexString(U16.f(this.dataLayerType)); str += "," + STR_DL_VLAN + "=0x" + Integer.toHexString(U16.f(this.dataLayerVirtualLan)); str += "," + STR_DL_VLAN_PCP + "=" + Integer.toHexString(U8.f(this.dataLayerVirtualLanPriorityCodePoint)); // l3 str += "," + STR_NW_DST + "=" + cidrToString(networkDestination, getNetworkDestinationMaskLen()); str += "," + STR_NW_SRC + "=" + cidrToString(networkSource, getNetworkSourceMaskLen()); str += "," + STR_NW_PROTO + "=" + this.networkProtocol; str += "," + STR_NW_TOS + "=" + this.getNetworkTypeOfService(); // l4 str += "," + STR_TP_DST + "=" + this.transportDestination; str += "," + STR_TP_SRC + "=" + this.transportSource; // wildcards str += ", wildcards=" + debugWildCards(wildcards); return "OFMatch[" + str + "]"; } /** * debug a set of wildcards */ public static String debugWildCards(int wildcards) { String str = ""; // l1 if ((wildcards & OFPFW_IN_PORT) != 0) str += "|" + STR_IN_PORT; // l2 if ((wildcards & OFPFW_DL_DST) != 0) str += "|" + STR_DL_DST; if ((wildcards & OFPFW_DL_SRC) != 0) str += "|" + STR_DL_SRC; if ((wildcards & OFPFW_DL_TYPE) != 0) str += "|" + STR_DL_TYPE; if ((wildcards & OFPFW_DL_VLAN) != 0) str += "|" + STR_DL_VLAN; if ((wildcards & OFPFW_DL_VLAN_PCP) != 0) str += "|" + STR_DL_VLAN_PCP; int nwDstMask = Math.max(32 - ((wildcards & OFPFW_NW_DST_MASK) >> OFPFW_NW_DST_SHIFT), 0); int nwSrcMask = Math.max(32 - ((wildcards & OFPFW_NW_SRC_MASK) >> OFPFW_NW_SRC_SHIFT), 0); // l3 if (nwDstMask < 32) str += "|" + STR_NW_DST + "(/" + nwDstMask + ")"; if (nwSrcMask < 32) str += "|" + STR_NW_SRC + "(/" + nwSrcMask + ")"; if ((wildcards & OFPFW_NW_PROTO) != 0) str += "|" + STR_NW_PROTO; if ((wildcards & 
OFPFW_NW_TOS) != 0) str += "|" + STR_NW_TOS; // l4 if ((wildcards & OFPFW_TP_DST) != 0) str += "|" + STR_TP_DST; if ((wildcards & OFPFW_TP_SRC) != 0) str += "|" + STR_TP_SRC; if ((str.length() > 0) && (str.charAt(0) == '|')) str = str.substring(1); // trim // the // leading // "," // done return str; } private String cidrToString(int ip, int prefix) { String str; if (prefix >= 32) { str = ipToString(ip); } else { // use the negation of mask to fake endian magic int mask = ~((1 << (32 - prefix)) - 1); str = ipToString(ip & mask) + "/" + prefix; } return str; } /** * Set this OFMatch's parameters based on a comma-separated key=value pair * dpctl-style string, e.g., from the output of OFMatch.toString() <br> * <p> * Supported keys/values include <br> * <p> * <TABLE border=1> * <TR> * <TD>KEY(s) * <TD>VALUE * </TR> * <TR> * <TD>"in_port","input_port" * <TD>integer * </TR> * <TR> * <TD>"dl_src","eth_src", "dl_dst","eth_dst" * <TD>hex-string * </TR> * <TR> * <TD>"dl_type", "dl_vlan", "dl_vlan_pcp" * <TD>integer * </TR> * <TR> * <TD>"nw_src", "nw_dst", "ip_src", "ip_dst" * <TD>CIDR-style netmask * </TR> * <TR> * <TD>"tp_src","tp_dst" * <TD>integer (max 64k) * </TR> * </TABLE> * <p> * The CIDR-style netmasks assume 32 netmask if none given, so: * "128.8.128.118/32" is the same as "128.8.128.118" * * @param match * a key=value comma separated string, e.g. 
* "in_port=5,ip_dst=192.168.0.0/16,tp_src=80" * @throws IllegalArgumentException * on unexpected key or value */ public void fromString(String match) throws IllegalArgumentException { if (match.equals("") || match.equalsIgnoreCase("any") || match.equalsIgnoreCase("all") || match.equals("[]")) match = "OFMatch[]"; String[] tokens = match.split("[\\[,\\]]"); String[] values; int initArg = 0; if (tokens[0].equals("OFMatch")) initArg = 1; this.wildcards = OFPFW_ALL; int i; for (i = initArg; i < tokens.length; i++) { values = tokens[i].split("="); if (values.length != 2) { throw new IllegalArgumentException( "Token " + tokens[i] + " does not have form 'key=value' parsing " + match); } values[0] = values[0].toLowerCase(); // try to make this case insens if (values[0].equals(STR_IN_PORT) || values[0].equals("input_port")) { this.inputPort = U16.t(Integer.valueOf(values[1])); this.wildcards &= ~OFPFW_IN_PORT; } else if (values[0].equals(STR_DL_DST) || values[0].equals("eth_dst")) { this.dataLayerDestination = HexString.fromHexString(values[1]); this.wildcards &= ~OFPFW_DL_DST; } else if (values[0].equals(STR_DL_SRC) || values[0].equals("eth_src")) { this.dataLayerSource = HexString.fromHexString(values[1]); this.wildcards &= ~OFPFW_DL_SRC; } else if (values[0].equals(STR_DL_TYPE) || values[0].equals("eth_type")) { if (values[1].startsWith("0x")) this.dataLayerType = U16.t(Integer.valueOf(values[1].replaceFirst("0x", ""), 16)); else this.dataLayerType = U16.t(Integer.valueOf(values[1])); this.wildcards &= ~OFPFW_DL_TYPE; } else if (values[0].equals(STR_DL_VLAN)) { if (values[1].startsWith("0x")) this.dataLayerVirtualLan = U16.t(Integer.valueOf(values[1].replaceFirst("0x", ""), 16)); else this.dataLayerVirtualLan = U16.t(Integer.valueOf(values[1])); this.wildcards &= ~OFPFW_DL_VLAN; } else if (values[0].equals(STR_DL_VLAN_PCP)) { this.dataLayerVirtualLanPriorityCodePoint = U8.t(Short.valueOf(values[1])); this.wildcards &= ~OFPFW_DL_VLAN_PCP; } else if 
(values[0].equals(STR_NW_DST) || values[0].equals("ip_dst")) { setFromCIDR(values[1], STR_NW_DST); } else if (values[0].equals(STR_NW_SRC) || values[0].equals("ip_src")) { setFromCIDR(values[1], STR_NW_SRC); } else if (values[0].equals(STR_NW_PROTO)) { if (values[1].startsWith("0x")) this.networkProtocol = U8.t(Short.valueOf(values[1].replaceFirst("0x",""),16)); else this.networkProtocol = U8.t(Short.valueOf(values[1])); this.wildcards &= ~OFPFW_NW_PROTO; } else if (values[0].equals(STR_NW_TOS)) { this.setNetworkTypeOfService(U8.t(Short.valueOf(values[1]))); this.wildcards &= ~OFPFW_NW_TOS; } else if (values[0].equals(STR_TP_DST)) { this.transportDestination = U16.t(Integer.valueOf(values[1])); this.wildcards &= ~OFPFW_TP_DST; } else if (values[0].equals(STR_TP_SRC)) { this.transportSource = U16.t(Integer.valueOf(values[1])); this.wildcards &= ~OFPFW_TP_SRC; } else { throw new IllegalArgumentException("unknown token " + tokens[i] + " parsing " + match); } } } /** * Set the networkSource or networkDestionation address and their wildcards * from the CIDR string * * @param cidr * "192.168.0.0/16" or "172.16.1.5" * @param which * one of STR_NW_DST or STR_NW_SRC * @throws IllegalArgumentException */ private void setFromCIDR(String cidr, String which) throws IllegalArgumentException { String values[] = cidr.split("/"); String[] ip_str = values[0].split("\\."); int ip = 0; ip += Integer.valueOf(ip_str[0]) << 24; ip += Integer.valueOf(ip_str[1]) << 16; ip += Integer.valueOf(ip_str[2]) << 8; ip += Integer.valueOf(ip_str[3]); int prefix = 32; // all bits are fixed, by default if (values.length >= 2) prefix = Integer.valueOf(values[1]); int mask = 32 - prefix; if (which.equals(STR_NW_DST)) { this.networkDestination = ip; this.wildcards = (wildcards & ~OFPFW_NW_DST_MASK) | (mask << OFPFW_NW_DST_SHIFT); } else if (which.equals(STR_NW_SRC)) { this.networkSource = ip; this.wildcards = (wildcards & ~OFPFW_NW_SRC_MASK) | (mask << OFPFW_NW_SRC_SHIFT); } } protected static String 
ipToString(int ip) { return Integer.toString(U8.f((byte) ((ip & 0xff000000) >> 24))) + "." + Integer.toString((ip & 0x00ff0000) >> 16) + "." + Integer.toString((ip & 0x0000ff00) >> 8) + "." + Integer.toString(ip & 0x000000ff); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package backtype.storm.utils; import java.util.HashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import backtype.storm.utils.disruptor.AbstractSequencerExt; import backtype.storm.utils.disruptor.RingBuffer; import com.lmax.disruptor.AlertException; import com.lmax.disruptor.EventFactory; import com.lmax.disruptor.EventHandler; import com.lmax.disruptor.InsufficientCapacityException; import com.lmax.disruptor.Sequence; import com.lmax.disruptor.SequenceBarrier; import com.lmax.disruptor.TimeoutException; import com.lmax.disruptor.WaitStrategy; import com.lmax.disruptor.dsl.ProducerType; /** * * A single consumer queue that uses the LMAX Disruptor. They key to the performance is the ability to catch up to the producer by processing tuples in batches. 
*/
public class DisruptorQueueImpl extends DisruptorQueue {
    private static final Logger LOG = LoggerFactory.getLogger(DisruptorQueueImpl.class);
    static boolean useSleep = true;

    public static void setUseSleep(boolean useSleep) {
        AbstractSequencerExt.setWaitSleep(useSleep);
    }

    // sentinel objects published into the ring; compared by identity
    private static final Object FLUSH_CACHE = new Object();
    private static final Object INTERRUPT = new Object();
    private static final String PREFIX = "disruptor-";

    private final String _queueName;
    private final RingBuffer<MutableObject> _buffer;
    // _consumer gates the producers; _barrier is what the consumer waits on
    private final Sequence _consumer;
    private final SequenceBarrier _barrier;

    // TODO: consider having a threadlocal cache of this variable to speed up
    // reads?
    //volatile boolean consumerStartedFlag = false;

    private final HashMap<String, Object> state = new HashMap<String, Object>(4);
    private final ConcurrentLinkedQueue<Object> _cache = new ConcurrentLinkedQueue<Object>();
    private final ReentrantReadWriteLock cacheLock = new ReentrantReadWriteLock();
    private final Lock readLock = cacheLock.readLock();
    private final Lock writeLock = cacheLock.writeLock();

    /**
     * Builds the ring buffer and registers the consumer sequence as a gating
     * sequence so producers cannot overrun the consumer.
     */
    public DisruptorQueueImpl(String queueName, ProducerType producerType, int bufferSize, WaitStrategy wait) {
        this._queueName = PREFIX + queueName;
        _buffer = RingBuffer.create(producerType, new ObjectEventFactory(), bufferSize, wait);
        _consumer = new Sequence();
        _barrier = _buffer.newBarrier();
        _buffer.addGatingSequences(_consumer);
        /*if (producerType == ProducerType.SINGLE) { consumerStartedFlag = true; } else { // make sure we flush the pending messages in cache first if (bufferSize < 2) { throw new RuntimeException("QueueSize must >= 2"); } try { publishDirect(FLUSH_CACHE, true); } catch (InsufficientCapacityException e) { throw new RuntimeException("This code should be unreachable!", e); } }*/
    }

    public String getName() {
        return _queueName;
    }

    // Consume a batch only if something is pending; otherwise return at once.
    public void consumeBatch(EventHandler<Object> handler) {
        //write pos > read pos
        if (_buffer.getCursor() > _consumer.get())
            consumeBatchWhenAvailable(handler);
    }

    // Wake the consumer by publishing the INTERRUPT sentinel into the ring.
    public void haltWithInterrupt() {
        publish(INTERRUPT);
    }

    /**
     * Non-blocking take of a single element.
     *
     * @return the next element, or null when the ring is empty
     */
    public Object poll() {
        // @@@
        // should use _cache.isEmpty, but it is slow
        // I will change the logic later
        //if (consumerStartedFlag == false) {
        //    return _cache.poll();
        //}
        final long nextSequence = _consumer.get() + 1;
        if (nextSequence <= _barrier.getCursor()) {
            MutableObject mo = _buffer.get(nextSequence);
            _consumer.set(nextSequence);
            Object ret = mo.o;
            mo.setObject(null); // release the slot's reference for GC
            return ret;
        }
        return null;
    }

    /**
     * Blocking take of a single element; waits on the barrier until one is
     * published. Returns null on interrupt or barrier timeout instead of
     * throwing.
     */
    public Object take() {
        // @@@
        // should use _cache.isEmpty, but it is slow
        // I will change the logic later
        //if (consumerStartedFlag == false) {
        //    return _cache.poll();
        //}
        final long nextSequence = _consumer.get() + 1;
        // final long availableSequence;
        try {
            _barrier.waitFor(nextSequence);
        } catch (AlertException e) {
            LOG.error(e.getMessage(), e);
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            LOG.error("InterruptedException " + e.getCause());
            // throw new RuntimeException(e);
            return null;
        } catch (TimeoutException e) {
            // LOG.error(e.getCause(), e);
            return null;
        }
        MutableObject mo = _buffer.get(nextSequence);
        _consumer.set(nextSequence);
        Object ret = mo.o;
        mo.setObject(null);
        return ret;
    }

    /**
     * Blocks on the barrier and then drains every element published up to
     * the available sequence through the handler in one batch.
     */
    public void consumeBatchWhenAvailable(EventHandler<Object> handler) {
        try {
            final long nextSequence = _consumer.get() + 1;
            final long availableSequence = _barrier.waitFor(nextSequence);
            if (availableSequence >= nextSequence) {
                consumeBatchToCursor(availableSequence, handler);
            }
        } catch (AlertException e) {
            LOG.error(e.getMessage(), e);
            throw new RuntimeException(e);
        } catch (InterruptedException e) {
            LOG.error("InterruptedException " + e.getCause());
            return;
        } catch (TimeoutException e) {
            // LOG.error(e.getCause(), e);
            return;
        }
    }

    /**
     * Drains every slot from the current consumer position up to cursor
     * through the handler. The consumer sequence is only advanced after the
     * whole batch completes; an interrupt mid-batch returns without
     * advancing, so unprocessed slots can be re-delivered.
     */
    public void consumeBatchToCursor(long cursor, EventHandler<Object> handler) {
        for (long curr = _consumer.get() + 1; curr <= cursor; curr++) {
            try {
                MutableObject mo = _buffer.get(curr);
                Object o = mo.o;
                mo.setObject(null);
                /*if (o == FLUSH_CACHE) { Object c = null; while (true) { c = _cache.poll(); if (c == null) break; else handler.onEvent(c, curr, true); } } else if (o == INTERRUPT) { throw new InterruptedException("Disruptor processing interrupted"); } else {*/
                handler.onEvent(o, curr, curr == cursor); // last arg flags end of batch
                //}
            } catch (InterruptedException e) {
                // throw new RuntimeException(e);
                LOG.error(e.getMessage(), e);
                return;
            } catch (Exception e) {
                LOG.error(e.getMessage(), e);
                throw new RuntimeException(e);
            }
        }
        // TODO: only set this if the consumer cursor has changed?
        _consumer.set(cursor);
    }

    /*
     * Caches until consumerStarted is called, upon which the cache is flushed to the consumer
     */
    public void publish(Object obj) {
        try {
            publish(obj, true);
        } catch (InsufficientCapacityException ex) {
            // blocking publish never reports insufficient capacity
            throw new RuntimeException("This code should be unreachable!");
        }
    }

    // Non-blocking publish; propagates InsufficientCapacityException when full.
    public void tryPublish(Object obj) throws InsufficientCapacityException {
        publish(obj, false);
    }

    public void publish(Object obj, boolean block) throws InsufficientCapacityException {
        /*boolean publishNow = consumerStartedFlag; if (!publishNow) { readLock.lock(); try { publishNow = consumerStartedFlag; if (!publishNow) { _cache.add(obj); } } finally { readLock.unlock(); } } if (publishNow) {*/
        publishDirect(obj, block);
        //}
    }

    // Claim a slot (blocking or not), store the payload, then publish it.
    protected void publishDirect(Object obj, boolean block) throws InsufficientCapacityException {
        final long id;
        if (block) {
            id = _buffer.next();
        } else {
            id = _buffer.tryNext(1);
        }
        final MutableObject m = _buffer.get(id);
        m.setObject(obj);
        _buffer.publish(id);
    }

    /* public void consumerStarted() { writeLock.lock(); consumerStartedFlag = true; writeLock.unlock(); }*/

    // Drain and discard everything currently in the ring.
    public void clear() {
        while (population() != 0L) {
            poll();
        }
    }

    public long population() {
        return (writePos() - readPos());
    }

    public long capacity() {
        return _buffer.getBufferSize();
    }

    public long writePos() {
        return _buffer.getCursor();
    }

    public long readPos() {
        return _consumer.get();
    }

    public float pctFull() {
        return (1.0F * population() / capacity());
    }

    @Override
    public Object getState() {
        // get readPos then writePos so it's never an under-estimate
        long rp = readPos();
        long wp = writePos();
        state.put("capacity", capacity());
        state.put("population", wp - rp);
        state.put("write_pos", wp);
        state.put("read_pos", rp);
        return state;
    }

    // Factory used by the ring buffer to pre-fill every slot.
    public static class ObjectEventFactory implements EventFactory<MutableObject> {
        @Override
        public MutableObject newInstance() {
            return new MutableObject();
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.repair; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import java.util.UUID; import com.google.common.collect.Iterables; import org.junit.BeforeClass; import org.junit.Test; import org.apache.cassandra.SchemaLoader; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Keyspace; import org.apache.cassandra.dht.IPartitioner; import org.apache.cassandra.dht.Murmur3Partitioner; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.locator.InetAddressAndPort; import org.apache.cassandra.schema.KeyspaceParams; import org.apache.cassandra.schema.Schema; import org.apache.cassandra.schema.TableId; import org.apache.cassandra.service.ActiveRepairService; import org.apache.cassandra.streaming.StreamCoordinator; import org.apache.cassandra.streaming.async.NettyStreamingConnectionFactory; import org.apache.cassandra.streaming.StreamPlan; import org.apache.cassandra.streaming.PreviewKind; import org.apache.cassandra.streaming.StreamSession; import org.apache.cassandra.utils.FBUtilities; import 
org.apache.cassandra.utils.MerkleTree;
import org.apache.cassandra.utils.MerkleTrees;
import org.apache.cassandra.utils.UUIDGen;

import static org.apache.cassandra.service.ActiveRepairService.NO_PENDING_REPAIR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class LocalSyncTaskTest extends AbstractRepairTest {
    private static final IPartitioner partitioner = Murmur3Partitioner.instance;
    private static final InetAddressAndPort local = FBUtilities.getBroadcastAddressAndPort();
    public static final String KEYSPACE1 = "DifferencerTest";
    public static final String CF_STANDARD = "Standard1";
    public static ColumnFamilyStore cfs;

    @BeforeClass
    public static void defineSchema() {
        SchemaLoader.prepareServer();
        SchemaLoader.createKeyspace(KEYSPACE1, KeyspaceParams.simple(1), SchemaLoader.standardCFMD(KEYSPACE1, CF_STANDARD));
        TableId tid = Schema.instance.getTableMetadata(KEYSPACE1, CF_STANDARD).id;
        cfs = Schema.instance.getColumnFamilyStoreInstance(tid);
    }

    /**
     * When there is no difference between two trees, LocalSyncTask should
     * return stats with 0 differences.
     */
    @Test
    public void testNoDifference() throws Throwable {
        final InetAddressAndPort ep2 = InetAddressAndPort.getByName("127.0.0.2");
        Range<Token> range = new Range<>(partitioner.getMinimumToken(), partitioner.getRandomToken());
        RepairJobDesc desc = new RepairJobDesc(UUID.randomUUID(), UUID.randomUUID(), KEYSPACE1, "Standard1", Arrays.asList(range));
        MerkleTrees tree1 = createInitialTree(desc);
        MerkleTrees tree2 = createInitialTree(desc);
        // difference the trees
        // note: we reuse the same endpoint which is bogus in theory but fine here
        TreeResponse r1 = new TreeResponse(local, tree1);
        TreeResponse r2 = new TreeResponse(ep2, tree2);
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), NO_PENDING_REPAIR, true, true, PreviewKind.NONE);
        task.run();
        assertEquals(0, task.get().numberOfDifferences);
    }

    /**
     * Invalidate one range in a tree and verify the sync task reports
     * exactly that differing range.
     */
    @Test
    public void testDifference() throws Throwable {
        Range<Token> range = new Range<>(partitioner.getMinimumToken(), partitioner.getRandomToken());
        UUID parentRepairSession = UUID.randomUUID();
        Keyspace keyspace = Keyspace.open(KEYSPACE1);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("Standard1");
        ActiveRepairService.instance.registerParentRepairSession(parentRepairSession, FBUtilities.getBroadcastAddressAndPort(), Arrays.asList(cfs), Arrays.asList(range), false, ActiveRepairService.UNREPAIRED_SSTABLE, false, PreviewKind.NONE);
        RepairJobDesc desc = new RepairJobDesc(parentRepairSession, UUID.randomUUID(), KEYSPACE1, "Standard1", Arrays.asList(range));
        MerkleTrees tree1 = createInitialTree(desc);
        MerkleTrees tree2 = createInitialTree(desc);
        // change a range in one of the trees
        Token token = partitioner.midpoint(range.left, range.right);
        tree1.invalidate(token);
        MerkleTree.TreeRange changed = tree1.get(token);
        changed.hash("non-empty hash!".getBytes());
        Set<Range<Token>> interesting = new HashSet<>();
        interesting.add(changed);
        // difference the trees
        // note: we reuse the same endpoint which is bogus in theory but fine here
        TreeResponse r1 = new TreeResponse(local, tree1);
        TreeResponse r2 = new TreeResponse(InetAddressAndPort.getByName("127.0.0.2"), tree2);
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), NO_PENDING_REPAIR, true, true, PreviewKind.NONE);
        // keep the (expected-to-fail) streaming attempt short, then restore
        NettyStreamingConnectionFactory.MAX_CONNECT_ATTEMPTS = 1;
        try {
            task.run();
        } finally {
            NettyStreamingConnectionFactory.MAX_CONNECT_ATTEMPTS = 3;
        }
        // ensure that the changed range was recorded
        assertEquals("Wrong differing ranges", interesting.size(), task.stat.numberOfDifferences);
    }

    /**
     * Full repair: the stream plan carries no pending-repair id and flushes
     * before transfer.
     */
    @Test
    public void fullRepairStreamPlan() throws Exception {
        UUID sessionID = registerSession(cfs, true, true);
        ActiveRepairService.ParentRepairSession prs = ActiveRepairService.instance.getParentRepairSession(sessionID);
        RepairJobDesc desc = new RepairJobDesc(sessionID, UUIDGen.getTimeUUID(), KEYSPACE1, CF_STANDARD, prs.getRanges());
        TreeResponse r1 = new TreeResponse(local, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        TreeResponse r2 = new TreeResponse(PARTICIPANT2, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), NO_PENDING_REPAIR, true, true, PreviewKind.NONE);
        StreamPlan plan = task.createStreamPlan();
        assertEquals(NO_PENDING_REPAIR, plan.getPendingRepair());
        assertTrue(plan.getFlushBeforeTransfer());
    }

    // Helper: assert the plan's single session requests/transfers the
    // expected number of streams in each direction.
    private static void assertNumInOut(StreamPlan plan, int expectedIncoming, int expectedOutgoing) {
        StreamCoordinator coordinator = plan.getCoordinator();
        StreamSession session = Iterables.getOnlyElement(coordinator.getAllStreamSessions());
        assertEquals(expectedIncoming, session.getNumRequests());
        assertEquals(expectedOutgoing, session.getNumTransfers());
    }

    /**
     * Incremental repair: the plan carries the parent session id, skips the
     * pre-transfer flush, and streams both ways.
     */
    @Test
    public void incrementalRepairStreamPlan() throws Exception {
        UUID sessionID = registerSession(cfs, true, true);
        ActiveRepairService.ParentRepairSession prs = ActiveRepairService.instance.getParentRepairSession(sessionID);
        RepairJobDesc desc = new RepairJobDesc(sessionID, UUIDGen.getTimeUUID(), KEYSPACE1, CF_STANDARD, prs.getRanges());
        TreeResponse r1 = new TreeResponse(local, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        TreeResponse r2 = new TreeResponse(PARTICIPANT2, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), desc.parentSessionId, true, true, PreviewKind.NONE);
        StreamPlan plan = task.createStreamPlan();
        assertEquals(desc.parentSessionId, plan.getPendingRepair());
        assertFalse(plan.getFlushBeforeTransfer());
        assertNumInOut(plan, 1, 1);
    }

    /**
     * Don't reciprocate streams if the other endpoint is a transient replica
     */
    @Test
    public void transientRemoteStreamPlan() throws NoSuchRepairSessionException {
        UUID sessionID = registerSession(cfs, true, true);
        ActiveRepairService.ParentRepairSession prs = ActiveRepairService.instance.getParentRepairSession(sessionID);
        RepairJobDesc desc = new RepairJobDesc(sessionID, UUIDGen.getTimeUUID(), KEYSPACE1, CF_STANDARD, prs.getRanges());
        TreeResponse r1 = new TreeResponse(local, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        TreeResponse r2 = new TreeResponse(PARTICIPANT2, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), desc.parentSessionId, true, false, PreviewKind.NONE);
        StreamPlan plan = task.createStreamPlan();
        assertNumInOut(plan, 1, 0);
    }

    /**
     * Don't request streams if the other endpoint is a transient replica
     */
    @Test
    public void transientLocalStreamPlan() throws NoSuchRepairSessionException {
        UUID sessionID = registerSession(cfs, true, true);
        ActiveRepairService.ParentRepairSession prs = ActiveRepairService.instance.getParentRepairSession(sessionID);
        RepairJobDesc desc = new RepairJobDesc(sessionID, UUIDGen.getTimeUUID(), KEYSPACE1, CF_STANDARD, prs.getRanges());
        TreeResponse r1 = new TreeResponse(local, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        TreeResponse r2 = new TreeResponse(PARTICIPANT2, createInitialTree(desc, DatabaseDescriptor.getPartitioner()));
        LocalSyncTask task = new LocalSyncTask(desc, r1.endpoint, r2.endpoint, MerkleTrees.difference(r1.trees, r2.trees), desc.parentSessionId, false, true, PreviewKind.NONE);
        StreamPlan plan = task.createStreamPlan();
        assertNumInOut(plan, 0, 1);
    }

    // Build an initialized (empty) MerkleTrees covering the descriptor's ranges.
    private MerkleTrees createInitialTree(RepairJobDesc desc, IPartitioner partitioner) {
        MerkleTrees trees = new MerkleTrees(partitioner);
        trees.addMerkleTrees((int) Math.pow(2, 15), desc.ranges);
        trees.init();
        return trees;
    }

    private MerkleTrees createInitialTree(RepairJobDesc desc) {
        return createInitialTree(desc, partitioner);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.rocketmq.common;

import java.net.InetAddress;
import java.net.UnknownHostException;
import org.apache.rocketmq.common.annotation.ImportantField;
import org.apache.rocketmq.common.constant.LoggerName;
import org.apache.rocketmq.common.constant.PermName;
import org.apache.rocketmq.logging.InternalLogger;
import org.apache.rocketmq.logging.InternalLoggerFactory;
import org.apache.rocketmq.remoting.common.RemotingUtil;

/**
 * Mutable configuration bean holding every tunable of a broker instance.
 *
 * <p>Fields carry their default values inline; fields annotated with
 * {@code @ImportantField} are the ones the project treats as key settings.
 * All state is exposed through plain getters/setters at the bottom of the
 * class. NOTE(review): there is no synchronization here — presumably the
 * object is populated once at startup before being shared; confirm with
 * callers before mutating it at runtime.
 */
public class BrokerConfig {
    private static final InternalLogger log = InternalLoggerFactory.getLogger(LoggerName.COMMON_LOGGER_NAME);

    // Resolved from the JVM system property first, then the environment variable.
    private String rocketmqHome = System.getProperty(MixAll.ROCKETMQ_HOME_PROPERTY, System.getenv(MixAll.ROCKETMQ_HOME_ENV));
    @ImportantField
    private String namesrvAddr = System.getProperty(MixAll.NAMESRV_ADDR_PROPERTY, System.getenv(MixAll.NAMESRV_ADDR_ENV));
    @ImportantField
    private String brokerIP1 = RemotingUtil.getLocalAddress();
    private String brokerIP2 = RemotingUtil.getLocalAddress();
    @ImportantField
    private String brokerName = localHostName();
    @ImportantField
    private String brokerClusterName = "DefaultCluster";
    @ImportantField
    private long brokerId = MixAll.MASTER_ID;
    // Broker is both readable and writable by default.
    private int brokerPermission = PermName.PERM_READ | PermName.PERM_WRITE;
    private int defaultTopicQueueNums = 8;
    @ImportantField
    private boolean autoCreateTopicEnable = true;
    private boolean clusterTopicEnable = true;
    private boolean brokerTopicEnable = true;
    @ImportantField
    private boolean autoCreateSubscriptionGroup = true;
    private String messageStorePlugIn = "";
    @ImportantField
    private String msgTraceTopicName = MixAll.RMQ_SYS_TRACE_TOPIC;
    @ImportantField
    private boolean traceTopicEnable = false;

    /**
     * thread numbers for send message thread pool, since spin lock will be used by default since 4.0.x, the default
     * value is 1.
     */
    private int sendMessageThreadPoolNums = 1; //16 + Runtime.getRuntime().availableProcessors() * 4;
    private int pullMessageThreadPoolNums = 16 + Runtime.getRuntime().availableProcessors() * 2;
    private int processReplyMessageThreadPoolNums = 16 + Runtime.getRuntime().availableProcessors() * 2;
    private int queryMessageThreadPoolNums = 8 + Runtime.getRuntime().availableProcessors();

    private int adminBrokerThreadPoolNums = 16;
    private int clientManageThreadPoolNums = 32;
    private int consumerManageThreadPoolNums = 32;
    private int heartbeatThreadPoolNums = Math.min(32, Runtime.getRuntime().availableProcessors());

    /**
     * Thread numbers for EndTransactionProcessor
     */
    private int endTransactionThreadPoolNums = 8 + Runtime.getRuntime().availableProcessors() * 2;

    private int flushConsumerOffsetInterval = 1000 * 5;
    private int flushConsumerOffsetHistoryInterval = 1000 * 60;

    @ImportantField
    private boolean rejectTransactionMessage = false;
    @ImportantField
    private boolean fetchNamesrvAddrByAddressServer = false;

    // Bounded-queue capacities for the request-processing thread pools.
    private int sendThreadPoolQueueCapacity = 10000;
    private int pullThreadPoolQueueCapacity = 100000;
    private int replyThreadPoolQueueCapacity = 10000;
    private int queryThreadPoolQueueCapacity = 20000;
    private int clientManagerThreadPoolQueueCapacity = 1000000;
    private int consumerManagerThreadPoolQueueCapacity = 1000000;
    private int heartbeatThreadPoolQueueCapacity = 50000;
    private int endTransactionPoolQueueCapacity = 100000;

    private int filterServerNums = 0;

    private boolean longPollingEnable = true;
    private long shortPollingTimeMills = 1000;

    private boolean notifyConsumerIdsChangedEnable = true;

    private boolean highSpeedMode = false;

    private boolean commercialEnable = true;
    private int commercialTimerCount = 1;
    private int commercialTransCount = 1;
    private int commercialBigCount = 1;
    private int commercialBaseCount = 1;

    private boolean transferMsgByHeap = true;
    private int maxDelayTime = 40;

    private String regionId = MixAll.DEFAULT_TRACE_REGION_ID;
    private int registerBrokerTimeoutMills = 6000;

    private boolean slaveReadEnable = false;

    private boolean disableConsumeIfConsumerReadSlowly = false;
    // 16 GiB fall-behind threshold (long arithmetic via the 1024L literal).
    private long consumerFallbehindThreshold = 1024L * 1024 * 1024 * 16;

    private boolean brokerFastFailureEnable = true;
    private long waitTimeMillsInSendQueue = 200;
    private long waitTimeMillsInPullQueue = 5 * 1000;
    private long waitTimeMillsInHeartbeatQueue = 31 * 1000;
    private long waitTimeMillsInTransactionQueue = 3 * 1000;

    private long startAcceptSendRequestTimeStamp = 0L;

    private boolean traceOn = true;

    // Switch of filter bit map calculation.
    // If switch on:
    // 1. Calculate filter bit map when construct queue.
    // 2. Filter bit map will be saved to consume queue extend file if allowed.
    private boolean enableCalcFilterBitMap = false;

    // Expect num of consumers will use filter.
    private int expectConsumerNumUseFilter = 32;

    // Error rate of bloom filter, 1~100.
    private int maxErrorRateOfBloomFilter = 20;

    // how long to clean filter data after dead. Default: 24h
    private long filterDataCleanTimeSpan = 24 * 3600 * 1000;

    // whether do filter when retry.
    private boolean filterSupportRetry = false;
    private boolean enablePropertyFilter = false;

    private boolean compressedRegister = false;

    private boolean forceRegister = true;

    /**
     * This configurable item defines interval of topics registration of broker to name server. Allowed values are
     * between 10,000 and 60,000 milliseconds.
     */
    private int registerNameServerPeriod = 1000 * 30;

    /**
     * The minimum time of the transactional message to be checked firstly, one message only exceed this time interval
     * that can be checked.
     */
    @ImportantField
    private long transactionTimeOut = 6 * 1000;

    /**
     * The maximum number of times the message was checked, if exceed this value, this message will be discarded.
     */
    @ImportantField
    private int transactionCheckMax = 15;

    /**
     * Transaction message check interval.
     */
    @ImportantField
    private long transactionCheckInterval = 60 * 1000;

    /**
     * Acl feature switch
     */
    @ImportantField
    private boolean aclEnable = false;

    private boolean storeReplyMessageEnable = true;

    /**
     * Returns this machine's host name, or the literal fallback
     * {@code "DEFAULT_BROKER"} when resolution fails (the failure is logged,
     * never propagated).
     */
    public static String localHostName() {
        try {
            return InetAddress.getLocalHost().getHostName();
        } catch (UnknownHostException e) {
            log.error("Failed to obtain the host name", e);
        }

        return "DEFAULT_BROKER";
    }

    // ------------------------------------------------------------------
    // Plain accessors. Each getter/setter below maps 1:1 onto the field of
    // the same name with no validation or side effects.
    // ------------------------------------------------------------------

    public boolean isTraceOn() {
        return traceOn;
    }

    public void setTraceOn(final boolean traceOn) {
        this.traceOn = traceOn;
    }

    public long getStartAcceptSendRequestTimeStamp() {
        return startAcceptSendRequestTimeStamp;
    }

    public void setStartAcceptSendRequestTimeStamp(final long startAcceptSendRequestTimeStamp) {
        this.startAcceptSendRequestTimeStamp = startAcceptSendRequestTimeStamp;
    }

    public long getWaitTimeMillsInSendQueue() {
        return waitTimeMillsInSendQueue;
    }

    public void setWaitTimeMillsInSendQueue(final long waitTimeMillsInSendQueue) {
        this.waitTimeMillsInSendQueue = waitTimeMillsInSendQueue;
    }

    public long getConsumerFallbehindThreshold() {
        return consumerFallbehindThreshold;
    }

    public void setConsumerFallbehindThreshold(final long consumerFallbehindThreshold) {
        this.consumerFallbehindThreshold = consumerFallbehindThreshold;
    }

    public boolean isBrokerFastFailureEnable() {
        return brokerFastFailureEnable;
    }

    public void setBrokerFastFailureEnable(final boolean brokerFastFailureEnable) {
        this.brokerFastFailureEnable = brokerFastFailureEnable;
    }

    public long getWaitTimeMillsInPullQueue() {
        return waitTimeMillsInPullQueue;
    }

    public void setWaitTimeMillsInPullQueue(final long waitTimeMillsInPullQueue) {
        this.waitTimeMillsInPullQueue = waitTimeMillsInPullQueue;
    }

    public boolean isDisableConsumeIfConsumerReadSlowly() {
        return disableConsumeIfConsumerReadSlowly;
    }

    public void setDisableConsumeIfConsumerReadSlowly(final boolean disableConsumeIfConsumerReadSlowly) {
        this.disableConsumeIfConsumerReadSlowly = disableConsumeIfConsumerReadSlowly;
    }

    public boolean isSlaveReadEnable() {
        return slaveReadEnable;
    }

    public void setSlaveReadEnable(final boolean slaveReadEnable) {
        this.slaveReadEnable = slaveReadEnable;
    }

    public int getRegisterBrokerTimeoutMills() {
        return registerBrokerTimeoutMills;
    }

    public void setRegisterBrokerTimeoutMills(final int registerBrokerTimeoutMills) {
        this.registerBrokerTimeoutMills = registerBrokerTimeoutMills;
    }

    public String getRegionId() {
        return regionId;
    }

    public void setRegionId(final String regionId) {
        this.regionId = regionId;
    }

    public boolean isTransferMsgByHeap() {
        return transferMsgByHeap;
    }

    public void setTransferMsgByHeap(final boolean transferMsgByHeap) {
        this.transferMsgByHeap = transferMsgByHeap;
    }

    public String getMessageStorePlugIn() {
        return messageStorePlugIn;
    }

    public void setMessageStorePlugIn(String messageStorePlugIn) {
        this.messageStorePlugIn = messageStorePlugIn;
    }

    public boolean isHighSpeedMode() {
        return highSpeedMode;
    }

    public void setHighSpeedMode(final boolean highSpeedMode) {
        this.highSpeedMode = highSpeedMode;
    }

    public String getRocketmqHome() {
        return rocketmqHome;
    }

    public void setRocketmqHome(String rocketmqHome) {
        this.rocketmqHome = rocketmqHome;
    }

    public String getBrokerName() {
        return brokerName;
    }

    public void setBrokerName(String brokerName) {
        this.brokerName = brokerName;
    }

    public int getBrokerPermission() {
        return brokerPermission;
    }

    public void setBrokerPermission(int brokerPermission) {
        this.brokerPermission = brokerPermission;
    }

    public int getDefaultTopicQueueNums() {
        return defaultTopicQueueNums;
    }

    public void setDefaultTopicQueueNums(int defaultTopicQueueNums) {
        this.defaultTopicQueueNums = defaultTopicQueueNums;
    }

    public boolean isAutoCreateTopicEnable() {
        return autoCreateTopicEnable;
    }

    public void setAutoCreateTopicEnable(boolean autoCreateTopic) {
        this.autoCreateTopicEnable = autoCreateTopic;
    }

    public String getBrokerClusterName() {
        return brokerClusterName;
    }

    public void setBrokerClusterName(String brokerClusterName) {
        this.brokerClusterName = brokerClusterName;
    }

    public String getBrokerIP1() {
        return brokerIP1;
    }

    public void setBrokerIP1(String brokerIP1) {
        this.brokerIP1 = brokerIP1;
    }

    public String getBrokerIP2() {
        return brokerIP2;
    }

    public void setBrokerIP2(String brokerIP2) {
        this.brokerIP2 = brokerIP2;
    }

    public int getSendMessageThreadPoolNums() {
        return sendMessageThreadPoolNums;
    }

    public void setSendMessageThreadPoolNums(int sendMessageThreadPoolNums) {
        this.sendMessageThreadPoolNums = sendMessageThreadPoolNums;
    }

    public int getPullMessageThreadPoolNums() {
        return pullMessageThreadPoolNums;
    }

    public void setPullMessageThreadPoolNums(int pullMessageThreadPoolNums) {
        this.pullMessageThreadPoolNums = pullMessageThreadPoolNums;
    }

    public int getProcessReplyMessageThreadPoolNums() {
        return processReplyMessageThreadPoolNums;
    }

    public void setProcessReplyMessageThreadPoolNums(int processReplyMessageThreadPoolNums) {
        this.processReplyMessageThreadPoolNums = processReplyMessageThreadPoolNums;
    }

    public int getQueryMessageThreadPoolNums() {
        return queryMessageThreadPoolNums;
    }

    public void setQueryMessageThreadPoolNums(final int queryMessageThreadPoolNums) {
        this.queryMessageThreadPoolNums = queryMessageThreadPoolNums;
    }

    public int getAdminBrokerThreadPoolNums() {
        return adminBrokerThreadPoolNums;
    }

    public void setAdminBrokerThreadPoolNums(int adminBrokerThreadPoolNums) {
        this.adminBrokerThreadPoolNums = adminBrokerThreadPoolNums;
    }

    public int getFlushConsumerOffsetInterval() {
        return flushConsumerOffsetInterval;
    }

    public void setFlushConsumerOffsetInterval(int flushConsumerOffsetInterval) {
        this.flushConsumerOffsetInterval = flushConsumerOffsetInterval;
    }

    public int getFlushConsumerOffsetHistoryInterval() {
        return flushConsumerOffsetHistoryInterval;
    }

    public void setFlushConsumerOffsetHistoryInterval(int flushConsumerOffsetHistoryInterval) {
        this.flushConsumerOffsetHistoryInterval = flushConsumerOffsetHistoryInterval;
    }

    public boolean isClusterTopicEnable() {
        return clusterTopicEnable;
    }

    public void setClusterTopicEnable(boolean clusterTopicEnable) {
        this.clusterTopicEnable = clusterTopicEnable;
    }

    public String getNamesrvAddr() {
        return namesrvAddr;
    }

    public void setNamesrvAddr(String namesrvAddr) {
        this.namesrvAddr = namesrvAddr;
    }

    public long getBrokerId() {
        return brokerId;
    }

    public void setBrokerId(long brokerId) {
        this.brokerId = brokerId;
    }

    public boolean isAutoCreateSubscriptionGroup() {
        return autoCreateSubscriptionGroup;
    }

    public void setAutoCreateSubscriptionGroup(boolean autoCreateSubscriptionGroup) {
        this.autoCreateSubscriptionGroup = autoCreateSubscriptionGroup;
    }

    public boolean isRejectTransactionMessage() {
        return rejectTransactionMessage;
    }

    public void setRejectTransactionMessage(boolean rejectTransactionMessage) {
        this.rejectTransactionMessage = rejectTransactionMessage;
    }

    public boolean isFetchNamesrvAddrByAddressServer() {
        return fetchNamesrvAddrByAddressServer;
    }

    public void setFetchNamesrvAddrByAddressServer(boolean fetchNamesrvAddrByAddressServer) {
        this.fetchNamesrvAddrByAddressServer = fetchNamesrvAddrByAddressServer;
    }

    public int getSendThreadPoolQueueCapacity() {
        return sendThreadPoolQueueCapacity;
    }

    public void setSendThreadPoolQueueCapacity(int sendThreadPoolQueueCapacity) {
        this.sendThreadPoolQueueCapacity = sendThreadPoolQueueCapacity;
    }

    public int getPullThreadPoolQueueCapacity() {
        return pullThreadPoolQueueCapacity;
    }

    public void setPullThreadPoolQueueCapacity(int pullThreadPoolQueueCapacity) {
        this.pullThreadPoolQueueCapacity = pullThreadPoolQueueCapacity;
    }

    public int getReplyThreadPoolQueueCapacity() {
        return replyThreadPoolQueueCapacity;
    }

    public void setReplyThreadPoolQueueCapacity(int replyThreadPoolQueueCapacity) {
        this.replyThreadPoolQueueCapacity = replyThreadPoolQueueCapacity;
    }

    public int getQueryThreadPoolQueueCapacity() {
        return queryThreadPoolQueueCapacity;
    }

    public void setQueryThreadPoolQueueCapacity(final int queryThreadPoolQueueCapacity) {
        this.queryThreadPoolQueueCapacity = queryThreadPoolQueueCapacity;
    }

    public boolean isBrokerTopicEnable() {
        return brokerTopicEnable;
    }

    public void setBrokerTopicEnable(boolean brokerTopicEnable) {
        this.brokerTopicEnable = brokerTopicEnable;
    }

    public int getFilterServerNums() {
        return filterServerNums;
    }

    public void setFilterServerNums(int filterServerNums) {
        this.filterServerNums = filterServerNums;
    }

    public boolean isLongPollingEnable() {
        return longPollingEnable;
    }

    public void setLongPollingEnable(boolean longPollingEnable) {
        this.longPollingEnable = longPollingEnable;
    }

    public boolean isNotifyConsumerIdsChangedEnable() {
        return notifyConsumerIdsChangedEnable;
    }

    public void setNotifyConsumerIdsChangedEnable(boolean notifyConsumerIdsChangedEnable) {
        this.notifyConsumerIdsChangedEnable = notifyConsumerIdsChangedEnable;
    }

    public long getShortPollingTimeMills() {
        return shortPollingTimeMills;
    }

    public void setShortPollingTimeMills(long shortPollingTimeMills) {
        this.shortPollingTimeMills = shortPollingTimeMills;
    }

    public int getClientManageThreadPoolNums() {
        return clientManageThreadPoolNums;
    }

    public void setClientManageThreadPoolNums(int clientManageThreadPoolNums) {
        this.clientManageThreadPoolNums = clientManageThreadPoolNums;
    }

    public boolean isCommercialEnable() {
        return commercialEnable;
    }

    public void setCommercialEnable(final boolean commercialEnable) {
        this.commercialEnable = commercialEnable;
    }

    public int getCommercialTimerCount() {
        return commercialTimerCount;
    }

    public void setCommercialTimerCount(final int commercialTimerCount) {
        this.commercialTimerCount = commercialTimerCount;
    }

    public int getCommercialTransCount() {
        return commercialTransCount;
    }

    public void setCommercialTransCount(final int commercialTransCount) {
        this.commercialTransCount = commercialTransCount;
    }

    public int getCommercialBigCount() {
        return commercialBigCount;
    }

    public void setCommercialBigCount(final int commercialBigCount) {
        this.commercialBigCount = commercialBigCount;
    }

    public int getMaxDelayTime() {
        return maxDelayTime;
    }

    public void setMaxDelayTime(final int maxDelayTime) {
        this.maxDelayTime = maxDelayTime;
    }

    public int getClientManagerThreadPoolQueueCapacity() {
        return clientManagerThreadPoolQueueCapacity;
    }

    public void setClientManagerThreadPoolQueueCapacity(int clientManagerThreadPoolQueueCapacity) {
        this.clientManagerThreadPoolQueueCapacity = clientManagerThreadPoolQueueCapacity;
    }

    public int getConsumerManagerThreadPoolQueueCapacity() {
        return consumerManagerThreadPoolQueueCapacity;
    }

    public void setConsumerManagerThreadPoolQueueCapacity(int consumerManagerThreadPoolQueueCapacity) {
        this.consumerManagerThreadPoolQueueCapacity = consumerManagerThreadPoolQueueCapacity;
    }

    public int getConsumerManageThreadPoolNums() {
        return consumerManageThreadPoolNums;
    }

    public void setConsumerManageThreadPoolNums(int consumerManageThreadPoolNums) {
        this.consumerManageThreadPoolNums = consumerManageThreadPoolNums;
    }

    public int getCommercialBaseCount() {
        return commercialBaseCount;
    }

    public void setCommercialBaseCount(int commercialBaseCount) {
        this.commercialBaseCount = commercialBaseCount;
    }

    public boolean isEnableCalcFilterBitMap() {
        return enableCalcFilterBitMap;
    }

    public void setEnableCalcFilterBitMap(boolean enableCalcFilterBitMap) {
        this.enableCalcFilterBitMap = enableCalcFilterBitMap;
    }

    public int getExpectConsumerNumUseFilter() {
        return expectConsumerNumUseFilter;
    }

    public void setExpectConsumerNumUseFilter(int expectConsumerNumUseFilter) {
        this.expectConsumerNumUseFilter = expectConsumerNumUseFilter;
    }

    public int getMaxErrorRateOfBloomFilter() {
        return maxErrorRateOfBloomFilter;
    }

    public void setMaxErrorRateOfBloomFilter(int maxErrorRateOfBloomFilter) {
        this.maxErrorRateOfBloomFilter = maxErrorRateOfBloomFilter;
    }

    public long getFilterDataCleanTimeSpan() {
        return filterDataCleanTimeSpan;
    }

    public void setFilterDataCleanTimeSpan(long filterDataCleanTimeSpan) {
        this.filterDataCleanTimeSpan = filterDataCleanTimeSpan;
    }

    public boolean isFilterSupportRetry() {
        return filterSupportRetry;
    }

    public void setFilterSupportRetry(boolean filterSupportRetry) {
        this.filterSupportRetry = filterSupportRetry;
    }

    public boolean isEnablePropertyFilter() {
        return enablePropertyFilter;
    }

    public void setEnablePropertyFilter(boolean enablePropertyFilter) {
        this.enablePropertyFilter = enablePropertyFilter;
    }

    public boolean isCompressedRegister() {
        return compressedRegister;
    }

    public void setCompressedRegister(boolean compressedRegister) {
        this.compressedRegister = compressedRegister;
    }

    public boolean isForceRegister() {
        return forceRegister;
    }

    public void setForceRegister(boolean forceRegister) {
        this.forceRegister = forceRegister;
    }

    public int getHeartbeatThreadPoolQueueCapacity() {
        return heartbeatThreadPoolQueueCapacity;
    }

    public void setHeartbeatThreadPoolQueueCapacity(int heartbeatThreadPoolQueueCapacity) {
        this.heartbeatThreadPoolQueueCapacity = heartbeatThreadPoolQueueCapacity;
    }

    public int getHeartbeatThreadPoolNums() {
        return heartbeatThreadPoolNums;
    }

    public void setHeartbeatThreadPoolNums(int heartbeatThreadPoolNums) {
        this.heartbeatThreadPoolNums = heartbeatThreadPoolNums;
    }

    public long getWaitTimeMillsInHeartbeatQueue() {
        return waitTimeMillsInHeartbeatQueue;
    }

    public void setWaitTimeMillsInHeartbeatQueue(long waitTimeMillsInHeartbeatQueue) {
        this.waitTimeMillsInHeartbeatQueue = waitTimeMillsInHeartbeatQueue;
    }

    public int getRegisterNameServerPeriod() {
        return registerNameServerPeriod;
    }

    public void setRegisterNameServerPeriod(int registerNameServerPeriod) {
        this.registerNameServerPeriod = registerNameServerPeriod;
    }

    public long getTransactionTimeOut() {
        return transactionTimeOut;
    }

    public void setTransactionTimeOut(long transactionTimeOut) {
        this.transactionTimeOut = transactionTimeOut;
    }

    public int getTransactionCheckMax() {
        return transactionCheckMax;
    }

    public void setTransactionCheckMax(int transactionCheckMax) {
        this.transactionCheckMax = transactionCheckMax;
    }

    public long getTransactionCheckInterval() {
        return transactionCheckInterval;
    }

    public void setTransactionCheckInterval(long transactionCheckInterval) {
        this.transactionCheckInterval = transactionCheckInterval;
    }

    public int getEndTransactionThreadPoolNums() {
        return endTransactionThreadPoolNums;
    }

    public void setEndTransactionThreadPoolNums(int endTransactionThreadPoolNums) {
        this.endTransactionThreadPoolNums = endTransactionThreadPoolNums;
    }

    public int getEndTransactionPoolQueueCapacity() {
        return endTransactionPoolQueueCapacity;
    }

    public void setEndTransactionPoolQueueCapacity(int endTransactionPoolQueueCapacity) {
        this.endTransactionPoolQueueCapacity = endTransactionPoolQueueCapacity;
    }

    public long getWaitTimeMillsInTransactionQueue() {
        return waitTimeMillsInTransactionQueue;
    }

    public void setWaitTimeMillsInTransactionQueue(long waitTimeMillsInTransactionQueue) {
        this.waitTimeMillsInTransactionQueue = waitTimeMillsInTransactionQueue;
    }

    public String getMsgTraceTopicName() {
        return msgTraceTopicName;
    }

    public void setMsgTraceTopicName(String msgTraceTopicName) {
        this.msgTraceTopicName = msgTraceTopicName;
    }

    public boolean isTraceTopicEnable() {
        return traceTopicEnable;
    }

    public void setTraceTopicEnable(boolean traceTopicEnable) {
        this.traceTopicEnable = traceTopicEnable;
    }

    public boolean isAclEnable() {
        return aclEnable;
    }

    public void setAclEnable(boolean aclEnable) {
        this.aclEnable = aclEnable;
    }

    public boolean isStoreReplyMessageEnable() {
        return storeReplyMessageEnable;
    }

    public void setStoreReplyMessageEnable(boolean storeReplyMessageEnable) {
        this.storeReplyMessageEnable = storeReplyMessageEnable;
    }
}
///////////////////////////////////////////////////////////////////////////////
// AUTHOR:       Henry Pinkard, henry.pinkard@gmail.com
//
// COPYRIGHT:    University of California, San Francisco, 2015
//
// LICENSE:      This file is distributed under the BSD license.
//               License text is included with the source distribution.
//
//               This file is distributed in the hope that it will be useful,
//               but WITHOUT ANY WARRANTY; without even the implied warranty
//               of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
//
//               IN NO EVENT SHALL THE COPYRIGHT OWNER OR
//               CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
//               INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES.
//
package org.micromanager.plugins.magellan.acq;

import java.awt.geom.Point2D;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadFactory;
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator;
import org.apache.commons.math3.analysis.polynomials.PolynomialSplineFunction;
import org.micromanager.plugins.magellan.coordinates.XYStagePosition;
import org.micromanager.plugins.magellan.gui.GUI;
import org.micromanager.plugins.magellan.main.Magellan;
import org.micromanager.plugins.magellan.misc.GlobalSettings;
import org.micromanager.plugins.magellan.misc.Log;

/**
 * Estimates the wall-clock duration of a fixed-area acquisition from timing
 * samples collected during previous runs (exposure, XY move, Z move, and
 * channel-switch times), and pushes the result to the GUI asynchronously on a
 * dedicated single-thread executor. Timing data is persisted via
 * {@link GlobalSettings} preferences so estimates survive restarts.
 */
public class AcqDurationEstimator {

    // Cap on how many timing samples are kept per category (oldest evicted first).
    private static final int MAX_DATA_POINTS = 10;

    // Preference keys under which timing histories are persisted.
    private static final String EXPOSURE_KEY = "Exposure_Time_Map";
    private static final String XY_KEY = "XY_Time_Map";
    private static final String Z_KEY = "ZMove_Time_Map";
    private static final String CHANNEL_KEY = "Channel_Switch_Time_Map";

    private ExecutorService executor_;
    private Future<?> currentTask_;
    // exposure (ms) -> recent acquisition-time samples; TreeMap keeps exposures
    // sorted, which estimateImageAcquisitionTime relies on for interpolation.
    private TreeMap<Double, LinkedList<Double>> exposureMap_;
    private LinkedList<Double> xyMoveTimeList_, zStepMoveTimeList_, channelSwitchTimeList_;

    public AcqDurationEstimator() {
        executor_ = Executors.newSingleThreadExecutor(new ThreadFactory() {
            @Override
            public Thread newThread(Runnable r) {
                return new Thread(r, "Acquisition duration estimation Thread");
            }
        });
        // Restore previously persisted timing histories (empty defaults on first run).
        exposureMap_ = GlobalSettings.getObjectFromPrefs(GlobalSettings.getInstance().getGlobalPrefernces(),
                EXPOSURE_KEY, new TreeMap<Double, LinkedList<Double>>());
        xyMoveTimeList_ = GlobalSettings.getObjectFromPrefs(GlobalSettings.getInstance().getGlobalPrefernces(),
                XY_KEY, new LinkedList<Double>());
        zStepMoveTimeList_ = GlobalSettings.getObjectFromPrefs(GlobalSettings.getInstance().getGlobalPrefernces(),
                Z_KEY, new LinkedList<Double>());
        channelSwitchTimeList_ = GlobalSettings.getObjectFromPrefs(GlobalSettings.getInstance().getGlobalPrefernces(),
                CHANNEL_KEY, new LinkedList<Double>());
    }

    /** Records one channel-switch timing sample, evicting the oldest past the cap. */
    public void storeChannelSwitchTime(double time) {
        channelSwitchTimeList_.add(time);
        if (channelSwitchTimeList_.size() > MAX_DATA_POINTS) {
            channelSwitchTimeList_.removeFirst();
        }
    }

    /** Records one Z-step move timing sample, evicting the oldest past the cap. */
    public void storeZMoveTime(double time) {
        zStepMoveTimeList_.add(time);
        if (zStepMoveTimeList_.size() > MAX_DATA_POINTS) {
            zStepMoveTimeList_.removeFirst();
        }
    }

    /** Records one XY-stage move timing sample, evicting the oldest past the cap. */
    public void storeXYMoveTime(double time) {
        xyMoveTimeList_.add(time);
        if (xyMoveTimeList_.size() > MAX_DATA_POINTS) {
            xyMoveTimeList_.removeFirst();
        }
    }

    /** Records one image-acquisition timing sample for the given exposure. */
    public void storeImageAcquisitionTime(double exposure, double time) {
        if (!exposureMap_.containsKey(exposure)) {
            exposureMap_.put(exposure, new LinkedList<Double>());
        }
        exposureMap_.get(exposure).add(time);
        // limit size
        if (exposureMap_.get(exposure).size() > MAX_DATA_POINTS) {
            exposureMap_.get(exposure).removeFirst();
        }
    }

    /**
     * Arithmetic mean of the samples. NOTE(review): returns NaN for an empty
     * list (0/0); callers catch the resulting downstream exceptions generically.
     */
    public double averageList(LinkedList<Double> list) {
        double avgTime = 0;
        for (double t : list) {
            avgTime += t;
        }
        avgTime /= list.size();
        return avgTime;
    }

    /**
     * Estimates per-image acquisition time (ms) for the given exposure by
     * averaging the recorded samples at each known exposure and linearly
     * interpolating between them (clamped outside the sampled range).
     */
    public double estimateImageAcquisitionTime(double exposure) {
        double[] exposures = new double[exposureMap_.keySet().size()];
        double[] avgTimes = new double[exposureMap_.keySet().size()];
        int i = 0;
        // TreeMap keySet iterates in ascending order, so exposures[] is sorted
        // as LinearInterpolator requires.
        for (double e : exposureMap_.keySet()) {
            exposures[i] = e;
            LinkedList<Double> times = exposureMap_.get(e);
            double avgTime = 0;
            for (double t : times) {
                avgTime += t;
            }
            avgTime /= times.size();
            avgTimes[i] = avgTime;
            i++;
        }
        return interpolateOrExtrapolate(exposures, avgTimes, exposure);
    }

    /**
     * Linear interpolation of y(xVal) over sample points (x, y), clamping to
     * the nearest endpoint's y-value when xVal lies outside the sampled range.
     *
     * BUG FIX: the original returned the boundary *knot* (an x-value, i.e. an
     * exposure time) instead of the y-value at that knot when clamping, which
     * produced dimensionally wrong estimates outside the sampled range.
     */
    private double interpolateOrExtrapolate(double[] x, double[] y, double xVal) {
        if (x.length == 1) {
            return y[0];
        }
        LinearInterpolator interpolator = new LinearInterpolator();
        PolynomialSplineFunction interpolant = interpolator.interpolate(x, y);
        double[] knots = interpolant.getKnots();
        if (xVal < knots[0]) {
            return y[0]; // clamp below range to first sample's value
        } else if (xVal > knots[interpolant.getN() - 1]) {
            return y[y.length - 1]; // clamp above range to last sample's value
        } else {
            return interpolant.value(xVal);
        }
    }

    /**
     * Asynchronously (re)computes the duration estimate for the given settings,
     * cancelling any estimate still in flight.
     */
    public synchronized void calcAcqDuration(FixedAreaAcquisitionSettings settings) {
        if (currentTask_ != null && !currentTask_.isDone()) {
            currentTask_.cancel(true);
        }
        currentTask_ = executor_.submit(estimateDuration(settings));
    }

    // Converts the cancellation interrupt into an exception so the long
    // estimation loop can bail out promptly.
    private void checkForInterrupt() throws InterruptedException {
        if (Thread.interrupted()) {
            throw new InterruptedException();
        }
    }

    /**
     * Builds the estimation task: walks every XY position and Z slice the
     * acquisition would visit, tallies images/moves/channel switches, converts
     * them to time with the averaged samples, and updates the GUI label.
     */
    private Runnable estimateDuration(final FixedAreaAcquisitionSettings settings) {
        return new Runnable() {
            @Override
            public void run() {
                try {
                    int dir = Magellan.getCore().getFocusDirection(Magellan.getCore().getFocusDevice());
                    boolean towardsSampleIsPositive;
                    if (dir > 0) {
                        towardsSampleIsPositive = true;
                    } else if (dir < 0) {
                        towardsSampleIsPositive = false;
                    } else {
                        GUI.updateEstiamtedDurationLabel("Error: focus direction undefined");
                        return;
                    }
                    double imageTime = estimateImageAcquisitionTime(Magellan.getCore().getExposure());
                    checkForInterrupt();
                    double xyMoveTime = averageList(xyMoveTimeList_);
                    checkForInterrupt();
                    double zMoveTime = averageList(zStepMoveTimeList_);
                    checkForInterrupt();
                    double channelMoveTime = averageList(channelSwitchTimeList_);
                    checkForInterrupt();
                    List<XYStagePosition> positions = getXYPositions(settings);
                    int numImages = 0, xyMoves = 0, zMoves = 0, channelSwitches = 0;
                    double zOrigin = FixedAreaAcquisition.getZTopCoordinate(settings.spaceMode_, settings,
                            towardsSampleIsPositive, false, 0, 0, Magellan.getCore().getFocusDevice());
                    for (XYStagePosition pos : positions) {
                        int sliceIndex = 0;
                        if (!FixedAreaAcquisition.isImagingVolumeUndefinedAtPosition(settings.spaceMode_, settings, pos)) {
                            xyMoves++;
                        }
                        while (true) {
                            checkForInterrupt();
                            double zPos = zOrigin + sliceIndex * settings.zStep_;
                            if ((settings.spaceMode_ == FixedAreaAcquisitionSettings.REGION_2D
                                    || settings.spaceMode_ == FixedAreaAcquisitionSettings.NO_SPACE) && sliceIndex > 0) {
                                numImages++;
                                xyMoves++;
                                break; //2D regions only have 1 slice
                            }
                            if (FixedAreaAcquisition.isImagingVolumeUndefinedAtPosition(settings.spaceMode_, settings, pos)) {
                                break;
                            }
                            if (FixedAreaAcquisition.isZBelowImagingVolume(settings.spaceMode_, settings, pos, zPos, zOrigin)) {
                                //position is below z stack or limit of focus device, z stack finished
                                break;
                            }
                            //3D region
                            if (FixedAreaAcquisition.isZAboveImagingVolume(settings.spaceMode_, settings, pos, zPos, zOrigin)) {
                                sliceIndex++;
                                continue; //position is above imaging volume or range of focus device
                            }
                            numImages++;
                            for (int channelIndex = 0; channelIndex < settings.channels_.size(); channelIndex++) {
                                if (!settings.channels_.get(channelIndex).uniqueEvent_
                                        || !settings.channels_.get(channelIndex).use_) {
                                    continue;
                                }
                                channelSwitches++;
                                if (channelIndex > 0) {
                                    numImages++;
                                }
                            }
                            sliceIndex++;
                            zMoves++;
                        } //slice loop finish
                    }
                    double estimatedTime = numImages * imageTime + xyMoveTime * xyMoves
                            + zMoveTime * zMoves + channelMoveTime * channelSwitches;
                    if (settings.timeEnabled_) {
                        // Per-timepoint duration is at least the configured interval
                        // (unit 1 = seconds, 2 = minutes, otherwise milliseconds).
                        estimatedTime = settings.numTimePoints_ * Math.max(estimatedTime,
                                settings.timePointInterval_ * (settings.timeIntervalUnit_ == 1 ? 1000
                                        : (settings.timeIntervalUnit_ == 2 ? 60000 : 1)));
                    }
                    long hours = (long) (estimatedTime / 60 / 60 / 1000),
                            minutes = (long) (estimatedTime / 60 / 1000),
                            seconds = (long) (estimatedTime / 1000);
                    minutes = minutes % 60;
                    seconds = seconds % 60;
                    // Zero-pad each component to two digits.
                    String h = ("0" + hours).substring(("0" + hours).length() - 2);
                    String m = ("0" + (minutes)).substring(("0" + minutes).length() - 2);
                    String s = ("0" + (seconds)).substring(("0" + seconds).length() - 2);
                    GUI.updateEstiamtedDurationLabel("Estimated duration: " + h + ":" + m + ":" + s + " (H:M:S)");
                    // Persist the timing histories used for this estimate.
                    GlobalSettings.putObjectInPrefs(GlobalSettings.getInstance().getGlobalPrefernces(), EXPOSURE_KEY, exposureMap_);
                    GlobalSettings.putObjectInPrefs(GlobalSettings.getInstance().getGlobalPrefernces(), XY_KEY, xyMoveTimeList_);
                    GlobalSettings.putObjectInPrefs(GlobalSettings.getInstance().getGlobalPrefernces(), Z_KEY, zStepMoveTimeList_);
                    GlobalSettings.putObjectInPrefs(GlobalSettings.getInstance().getGlobalPrefernces(), CHANNEL_KEY, channelSwitchTimeList_);
                } catch (InterruptedException ex) {
                    return; //Interrupted, return with no result
                } catch (Exception e) {
                    // Typo fixed in the displayed message ("estimting acquisiton").
                    GUI.updateEstiamtedDurationLabel("Error estimating acquisition duration");
                    return;
                }
            }
        };
    }

    /**
     * Resolves the XY positions the acquisition would visit for the given
     * space mode; falls back to a single origin position when no space mode
     * applies.
     */
    private List<XYStagePosition> getXYPositions(FixedAreaAcquisitionSettings settings) throws Exception, InterruptedException {
        List<XYStagePosition> list;
        if (settings.spaceMode_ == FixedAreaAcquisitionSettings.SURFACE_FIXED_DISTANCE_Z_STACK) {
            list = settings.footprint_.getXYPositionsNoUpdate();
        } else if (settings.spaceMode_ == FixedAreaAcquisitionSettings.VOLUME_BETWEEN_SURFACES_Z_STACK) {
            list = settings.useTopOrBottomFootprint_ == FixedAreaAcquisitionSettings.FOOTPRINT_FROM_TOP
                    ? settings.topSurface_.getXYPositionsNoUpdate() : settings.bottomSurface_.getXYPositionsNoUpdate();
        } else if (settings.spaceMode_ == FixedAreaAcquisitionSettings.SIMPLE_Z_STACK) {
            list = settings.footprint_.getXYPositionsNoUpdate();
        } else if (settings.spaceMode_ == FixedAreaAcquisitionSettings.REGION_2D) {
            list = settings.footprint_.getXYPositionsNoUpdate();
        } else {
            list = new ArrayList<XYStagePosition>();
            list.add(new XYStagePosition(new Point2D.Double(), 0, 0));
        }
        return list;
    }
}
/**
 * Copyright 2010 - 2013 CosmoCode GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.cosmocode.collections.utility;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;

/**
 * Static factory providing methods which return utility views on all kinds of
 * collections. All of the following interfaces are supported:
 *
 * <ul>
 *   <li>{@link UtilityIterable}</li>
 *   <li>{@link UtilityIterator}</li>
 *   <li>{@link UtilityCollection}</li>
 *   <li>{@link UtilitySet}</li>
 *   <li>{@link UtilityList}</li>
 *   <li>{@link UtilityListIterator}</li>
 *   <li>{@link UtilityMap}</li>
 * </ul>
 *
 * @author Willi Schoenborn
 */
public final class Utility {

    /**
     * Prevent instantiation.
     */
    private Utility() {

    }

    /**
     * Creates a {@link UtilityIterable} view on an {@link Iterable}.
     *
     * @param <E> the generic element type
     * @param iterable the {@link Iterable} which will be used as a {@link UtilityIterable}
     * @return a new {@link UtilityIterable} backed by the given {@link Iterable}, or
     *         the argument itself if it already is a {@link UtilityIterable}
     * @throws NullPointerException if iterable is null
     */
    public static <E> UtilityIterable<E> asUtilityIterable(Iterable<E> iterable) {
        if (iterable == null) {
            throw new NullPointerException("Iterable must not be null");
        }
        return iterable instanceof UtilityIterable<?>
            ? (UtilityIterable<E>) iterable
            : new DefaultUtilityIterable<E>(iterable);
    }

    /**
     * Creates a {@link UtilityIterator} view on an {@link Iterator}.
     *
     * @param <E> the generic element type
     * @param iterator the {@link Iterator} which will be used as a {@link UtilityIterator}
     * @return a new {@link UtilityIterator} backed by the given {@link Iterator}, or
     *         the argument itself if it already is a {@link UtilityIterator}
     * @throws NullPointerException if iterator is null
     */
    public static <E> UtilityIterator<E> asUtilityIterator(Iterator<E> iterator) {
        if (iterator == null) {
            throw new NullPointerException("Iterator must not be null");
        }
        return iterator instanceof UtilityIterator<?>
            ? (UtilityIterator<E>) iterator
            : new DefaultUtilityIterator<E>(iterator);
    }

    /**
     * Creates a {@link UtilityCollection} view on a {@link Collection}.
     *
     * @param <E> the generic element type
     * @param collection the {@link Collection} which will be used as a {@link UtilityCollection}
     * @return a new {@link UtilityCollection} backed by the given {@link Collection}, or
     *         the argument itself if it already is a {@link UtilityCollection}
     * @throws NullPointerException if collection is null
     */
    public static <E> UtilityCollection<E> asUtilityCollection(Collection<E> collection) {
        if (collection == null) {
            throw new NullPointerException("Collection must not be null");
        }
        return collection instanceof UtilityCollection<?>
            ? (UtilityCollection<E>) collection
            : new DefaultUtilityCollection<E>(collection);
    }

    /**
     * Creates a {@link UtilitySet} view on a {@link Set}.
     *
     * @param <E> the generic element type
     * @param set the {@link Set} which will be used as a {@link UtilitySet}
     * @return a new {@link UtilitySet} backed by the given {@link Set}, or
     *         the argument itself if it already is a {@link UtilitySet}
     * @throws NullPointerException if set is null
     */
    public static <E> UtilitySet<E> asUtilitySet(Set<E> set) {
        if (set == null) {
            throw new NullPointerException("Set must not be null");
        }
        return set instanceof UtilitySet<?>
            ? (UtilitySet<E>) set
            : new DefaultUtilitySet<E>(set);
    }

    /**
     * Creates a {@link UtilityList} view on a {@link List}.
     *
     * @param <E> the generic element type
     * @param list the {@link List} which will be used as a {@link UtilityList}
     * @return a new {@link UtilityList} backed by the given {@link List}, or
     *         the argument itself if it already is a {@link UtilityList}
     * @throws NullPointerException if list is null
     */
    public static <E> UtilityList<E> asUtilityList(List<E> list) {
        if (list == null) {
            throw new NullPointerException("List must not be null");
        }
        return list instanceof UtilityList<?>
            ? (UtilityList<E>) list
            : new DefaultUtilityList<E>(list);
    }

    /**
     * Creates a {@link UtilityListIterator} view on a {@link ListIterator}.
     *
     * @param <E> the generic element type
     * @param listIterator the {@link ListIterator} which will be used as a {@link UtilityListIterator}
     * @return a new {@link UtilityListIterator} backed by the given {@link ListIterator}, or
     *         the argument itself if it already is a {@link UtilityListIterator}
     * @throws NullPointerException if listIterator is null
     */
    public static <E> UtilityListIterator<E> asUtilityListIterator(ListIterator<E> listIterator) {
        if (listIterator == null) {
            throw new NullPointerException("ListIterator must not be null");
        }
        return listIterator instanceof UtilityListIterator<?>
            ? (UtilityListIterator<E>) listIterator
            : new DefaultUtilityListIterator<E>(listIterator);
    }

    /**
     * Creates a {@link UtilityMap} view on a {@link Map}.
     *
     * @param <K> the generic key type
     * @param <V> the generic value type
     * @param map the {@link Map} which will be used as a {@link UtilityMap}
     * @return a new {@link UtilityMap} backed by the given {@link Map}, or
     *         the argument itself if it already is a {@link UtilityMap}
     * @throws NullPointerException if map is null
     */
    public static <K, V> UtilityMap<K, V> asUtilityMap(Map<K, V> map) {
        if (map == null) {
            throw new NullPointerException("Map must not be null");
        }
        return map instanceof UtilityMap<?, ?>
            ? (UtilityMap<K, V>) map
            : new DefaultUtilityMap<K, V>(map);
    }

    /**
     * Creates a new {@link UtilitySet} backed by a fresh {@link java.util.HashSet}.
     *
     * @param <E> the generic element type
     * @return a new {@link UtilitySet}
     */
    public static <E> UtilitySet<E> createUtilitySet() {
        return asUtilitySet(Sets.<E>newHashSet());
    }

    /**
     * Creates a new {@link UtilityList} backed by a fresh {@link java.util.ArrayList}.
     *
     * @param <E> the generic element type
     * @return a new {@link UtilityList}
     */
    public static <E> UtilityList<E> createUtilityList() {
        return asUtilityList(Lists.<E>newArrayList());
    }

    /**
     * Creates a new {@link UtilityMap} backed by a fresh {@link java.util.HashMap}.
     *
     * @param <K> the generic key type
     * @param <V> the generic value type
     * @return a new {@link UtilityMap}
     */
    public static <K, V> UtilityMap<K, V> createUtilityMap() {
        return asUtilityMap(Maps.<K, V>newHashMap());
    }

    /**
     * Creates a new {@link UtilityCollection}, currently list-backed.
     *
     * @param <E> the generic element type
     * @return a new {@link UtilityCollection}
     */
    public static <E> UtilityCollection<E> createUtilityCollection() {
        final UtilityList<E> list = Utility.createUtilityList();
        return list;
    }

}
/*
 * Copyright 2011 gitblit.com.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.gitblit.tests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.IOException;

import org.junit.Test;

import com.gitblit.ConfigUserService;
import com.gitblit.FileUserService;
import com.gitblit.IUserService;
import com.gitblit.models.TeamModel;
import com.gitblit.models.UserModel;

/**
 * Exercises both {@link IUserService} implementations (properties-file backed
 * and config-file backed) with the same user/team scenario.
 *
 * Note: {@link #testUsers(IUserService)} seeds state (the admin user and the
 * "admins" team) that {@link #testTeams(IUserService)} relies on, so the two
 * helpers must run in that order against the same service instance.
 */
public class UserServiceTest {

    @Test
    public void testFileUserService() throws IOException {
        File file = new File("us-test.properties");
        // NOTE(review): delete() result is ignored; a stale us-test.properties
        // from an earlier failed run would be silently removed or, if deletion
        // fails, could skew this test — confirm intended.
        file.delete();
        IUserService service = new FileUserService(file);
        testUsers(service);
        testTeams(service);
        file.delete();
    }

    @Test
    public void testConfigUserService() throws IOException {
        File file = new File("us-test.conf");
        file.delete();
        IUserService service = new ConfigUserService(file);
        testUsers(service);
        testTeams(service);
        file.delete();
    }

    /**
     * Creates an admin (in the "admins" team) plus two users, then verifies
     * case-insensitive lookup, authentication, repository-role deletion,
     * user deletion and repository renaming.
     */
    protected void testUsers(IUserService service) {
        UserModel admin = service.getUserModel("admin");
        // NOTE(review): assertTrue(x == null) would give a clearer failure
        // message as assertNull(x); same for the assertTrue(x != null) below.
        assertTrue(admin == null);

        // add admin and admins team
        TeamModel admins = new TeamModel("admins");
        admins.mailingLists.add("admins@localhost.com");

        admin = new UserModel("admin");
        admin.password = "password";
        admin.canAdmin = true;
        admin.excludeFromFederation = true;
        admin.teams.add(admins);
        service.updateUserModel(admin);
        admin = null;
        admins = null;

        // add new user
        UserModel newUser = new UserModel("test");
        newUser.password = "testPassword";
        newUser.addRepository("repo1");
        newUser.addRepository("repo2");
        newUser.addRepository("sub/repo3");
        service.updateUserModel(newUser);

        // add one more new user and then test reload of first new user
        newUser = new UserModel("GARBAGE");
        newUser.password = "garbage";
        service.updateUserModel(newUser);

        // confirm all added users (lookup is expected to be case-insensitive)
        assertEquals(3, service.getAllUsernames().size());
        assertTrue(service.getUserModel("garbage") != null);
        assertTrue(service.getUserModel("GaRbAgE") != null);
        assertTrue(service.getUserModel("GARBAGE") != null);

        // confirm reloaded test user
        newUser = service.getUserModel("test");
        assertEquals("testPassword", newUser.password);
        assertEquals(3, newUser.repositories.size());
        assertTrue(newUser.hasRepository("repo1"));
        assertTrue(newUser.hasRepository("repo2"));
        assertTrue(newUser.hasRepository("sub/repo3"));

        // confirm authentication of test user
        UserModel testUser = service.authenticate("test", "testPassword".toCharArray());
        assertEquals("test", testUser.username);
        assertEquals("testPassword", testUser.password);

        // delete a repository role and confirm role removal from test user
        service.deleteRepositoryRole("repo2");
        testUser = service.getUserModel("test");
        assertEquals(2, testUser.repositories.size());

        // delete garbage user and confirm user count
        service.deleteUser("garbage");
        assertEquals(2, service.getAllUsernames().size());

        // rename repository and confirm role change for test user
        service.renameRepositoryRole("repo1", "newrepo1");
        testUser = service.getUserModel("test");
        assertTrue(testUser.hasRepository("newrepo1"));
    }

    /**
     * Verifies team CRUD, implicit team creation/drop through user updates,
     * team renaming and that team-granted repository access follows the team.
     * Assumes {@link #testUsers(IUserService)} already ran on this service.
     */
    protected void testTeams(IUserService service) {
        // confirm we have 1 team (admins)
        assertEquals(1, service.getAllTeamNames().size());
        assertEquals("admins", service.getAllTeamNames().get(0));

        // remove newrepo1 from test user
        // now test user has no repositories
        UserModel user = service.getUserModel("test");
        user.repositories.clear();
        service.updateUserModel(user);
        user = service.getUserModel("test");
        assertEquals(0, user.repositories.size());
        assertFalse(user.canAccessRepository("newrepo1"));
        assertFalse(user.canAccessRepository("NEWREPO1"));

        // create test team and add test user and newrepo1
        TeamModel team = new TeamModel("testteam");
        team.addUser("test");
        team.addRepository("newrepo1");
        service.updateTeamModel(team);

        // confirm 1 user and 1 repo
        team = service.getTeamModel("testteam");
        assertEquals(1, team.repositories.size());
        assertEquals(1, team.users.size());

        // confirm team membership
        user = service.getUserModel("test");
        assertEquals(0, user.repositories.size());
        assertEquals(1, user.teams.size());

        // confirm team access (case-insensitive)
        assertTrue(team.hasRepository("newrepo1"));
        assertTrue(user.hasTeamAccess("newrepo1"));
        assertTrue(team.hasRepository("NEWREPO1"));
        assertTrue(user.hasTeamAccess("NEWREPO1"));

        // rename the team and add new repository
        team.addRepository("newrepo2");
        team.name = "testteam2";
        service.updateTeamModel("testteam", team);
        team = service.getTeamModel("testteam2");
        user = service.getUserModel("test");

        // confirm user and team can access newrepo2
        assertEquals(2, team.repositories.size());
        assertTrue(team.hasRepository("newrepo2"));
        assertTrue(user.hasTeamAccess("newrepo2"));
        assertTrue(team.hasRepository("NEWREPO2"));
        assertTrue(user.hasTeamAccess("NEWREPO2"));

        // delete testteam2
        service.deleteTeam("testteam2");
        team = service.getTeamModel("testteam2");
        user = service.getUserModel("test");

        // confirm team does not exist and user can not access newrepo1 and 2
        assertEquals(null, team);
        assertFalse(user.canAccessRepository("newrepo1"));
        assertFalse(user.canAccessRepository("newrepo2"));

        // create new team and add it to user
        // this tests the inverse team creation/team addition
        team = new TeamModel("testteam");
        team.addRepository("NEWREPO1");
        team.addRepository("NEWREPO2");
        user.teams.add(team);
        service.updateUserModel(user);

        // confirm the inverted team addition
        user = service.getUserModel("test");
        team = service.getTeamModel("testteam");
        assertTrue(user.hasTeamAccess("newrepo1"));
        assertTrue(user.hasTeamAccess("newrepo2"));
        assertTrue(team.hasUser("test"));

        // drop testteam from user and add nextteam to user
        team = new TeamModel("nextteam");
        team.addRepository("NEWREPO1");
        team.addRepository("NEWREPO2");
        user.teams.clear();
        user.teams.add(team);
        service.updateUserModel(user);

        // confirm implicit drop: clearing user.teams detaches the user from
        // testteam without an explicit updateTeamModel call
        user = service.getUserModel("test");
        team = service.getTeamModel("testteam");
        assertTrue(user.hasTeamAccess("newrepo1"));
        assertTrue(user.hasTeamAccess("newrepo2"));
        assertFalse(team.hasUser("test"));
        team = service.getTeamModel("nextteam");
        assertTrue(team.hasUser("test"));

        // delete the user and confirm team no longer has user
        service.deleteUser("test");
        team = service.getTeamModel("testteam");
        assertFalse(team.hasUser("test"));

        // delete both teams
        service.deleteTeam("testteam");
        service.deleteTeam("nextteam");

        // assert we still have the admins team
        assertEquals(1, service.getAllTeamNames().size());
        assertEquals("admins", service.getAllTeamNames().get(0));
        team = service.getTeamModel("admins");
        assertEquals(1, team.mailingLists.size());
        assertTrue(team.mailingLists.contains("admins@localhost.com"));
    }
}
/* * Copyright 2014 JBoss by Red Hat. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.kie.test.util; import static org.junit.Assert.fail; import java.io.File; import java.io.FilenameFilter; import java.util.List; import java.util.Map; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; import javax.persistence.EntityManagerFactory; import org.drools.compiler.kie.builder.impl.InternalKieModule; import org.jbpm.kie.services.impl.FormManagerServiceImpl; import org.jbpm.kie.services.impl.KModuleDeploymentService; import org.jbpm.kie.services.impl.ProcessServiceImpl; import org.jbpm.kie.services.impl.RuntimeDataServiceImpl; import org.jbpm.kie.services.impl.UserTaskServiceImpl; import org.jbpm.kie.services.impl.bpmn2.BPMN2DataServiceImpl; import org.jbpm.kie.services.test.TestIdentityProvider; import org.jbpm.process.instance.impl.util.LoggingPrintStream; import org.jbpm.runtime.manager.impl.RuntimeManagerFactoryImpl; import org.jbpm.runtime.manager.impl.jpa.EntityManagerFactoryManager; import org.jbpm.services.api.DefinitionService; import org.jbpm.services.api.DeploymentService; import org.jbpm.services.api.ProcessService; import org.jbpm.services.api.RuntimeDataService; import org.jbpm.services.api.UserTaskService; import org.jbpm.services.task.HumanTaskServiceFactory; import org.jbpm.shared.services.impl.TransactionalCommandService; import org.junit.AfterClass; import org.junit.BeforeClass; import 
org.kie.api.KieServices;
import org.kie.api.builder.KieBuilder;
import org.kie.api.builder.KieFileSystem;
import org.kie.api.builder.Message;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.model.KieBaseModel;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.builder.model.KieSessionModel;
import org.kie.api.conf.EqualityBehaviorOption;
import org.kie.api.conf.EventProcessingOption;
import org.kie.api.runtime.conf.ClockTypeOption;
import org.kie.internal.io.ResourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import bitronix.tm.resource.jdbc.PoolingDataSource;

/**
 * Base class for jBPM kie-services integration tests. Wires the deployment,
 * definition, runtime-data, process and user-task services against an
 * in-memory H2 datasource, and provides helpers for building test kjars.
 */
public abstract class AbstractKieServicesBaseTest {

    private static final Logger logger = LoggerFactory.getLogger(AbstractKieServicesBaseTest.class);

    protected static final String ARTIFACT_ID = "test-module";
    protected static final String GROUP_ID = "org.jbpm.test";
    protected static final String VERSION = "1.0.0-SNAPSHOT";

    private PoolingDataSource ds;

    protected EntityManagerFactory emf;
    protected DeploymentService deploymentService;
    protected DefinitionService bpmn2Service;
    protected RuntimeDataService runtimeDataService;
    protected ProcessService processService;
    protected UserTaskService userTaskService;
    protected TestIdentityProvider identityProvider;

    /** Silences System.out/err noise for the whole test class. */
    @BeforeClass
    public static void configure() {
        LoggingPrintStream.interceptSysOutSysErr();
    }

    @AfterClass
    public static void reset() {
        LoggingPrintStream.resetInterceptSysOutSysErr();
    }

    /** Releases the EMF, the cached factories and the datasource. */
    protected void close() {
        if (emf != null) {
            emf.close();
        }
        EntityManagerFactoryManager.get().clear();
        closeDataSource();
    }

    /**
     * Builds the datasource and wires all services together. Call from a
     * subclass @Before method; pair with {@link #close()} in @After.
     */
    protected void configureServices() {
        buildDatasource();
        emf = EntityManagerFactoryManager.get().getOrCreate("org.jbpm.domain");
        identityProvider = new TestIdentityProvider();
        // build definition service
        bpmn2Service = new BPMN2DataServiceImpl();
        // build deployment service
        deploymentService = new KModuleDeploymentService();
        ((KModuleDeploymentService) deploymentService).setBpmn2Service(bpmn2Service);
        ((KModuleDeploymentService) deploymentService).setEmf(emf);
        ((KModuleDeploymentService) deploymentService).setIdentityProvider(identityProvider);
        ((KModuleDeploymentService) deploymentService).setManagerFactory(new RuntimeManagerFactoryImpl());
        ((KModuleDeploymentService) deploymentService).setFormManagerService(new FormManagerServiceImpl());
        // build runtime data service
        runtimeDataService = new RuntimeDataServiceImpl();
        ((RuntimeDataServiceImpl) runtimeDataService).setCommandService(new TransactionalCommandService(emf));
        ((RuntimeDataServiceImpl) runtimeDataService).setIdentityProvider(identityProvider);
        ((RuntimeDataServiceImpl) runtimeDataService).setTaskService(HumanTaskServiceFactory.newTaskServiceConfigurator().entityManagerFactory(emf).getTaskService());
        ((KModuleDeploymentService) deploymentService).setRuntimeDataService(runtimeDataService);
        // set runtime data service as listener on deployment service
        ((KModuleDeploymentService) deploymentService).addListener((RuntimeDataServiceImpl) runtimeDataService);
        ((KModuleDeploymentService) deploymentService).addListener((BPMN2DataServiceImpl) bpmn2Service);
        // build process service
        processService = new ProcessServiceImpl();
        ((ProcessServiceImpl) processService).setDataService(runtimeDataService);
        ((ProcessServiceImpl) processService).setDeploymentService(deploymentService);
        // build user task service
        userTaskService = new UserTaskServiceImpl();
        ((UserTaskServiceImpl) userTaskService).setDataService(runtimeDataService);
        ((UserTaskServiceImpl) userTaskService).setDeploymentService(deploymentService);
    }

    /**
     * Renders a minimal pom.xml for the given GAV and optional dependencies.
     * Uses a StringBuilder instead of repeated String concatenation in a loop.
     *
     * @param releaseId the GAV of the module the pom describes
     * @param dependencies optional GAVs to list as dependencies
     * @return the pom.xml content
     */
    protected String getPom(ReleaseId releaseId, ReleaseId... dependencies) {
        StringBuilder pom = new StringBuilder();
        pom.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
           .append("<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n")
           .append(" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n")
           .append(" <modelVersion>4.0.0</modelVersion>\n")
           .append("\n")
           .append(" <groupId>").append(releaseId.getGroupId()).append("</groupId>\n")
           .append(" <artifactId>").append(releaseId.getArtifactId()).append("</artifactId>\n")
           .append(" <version>").append(releaseId.getVersion()).append("</version>\n")
           .append("\n");
        if (dependencies != null && dependencies.length > 0) {
            pom.append("<dependencies>\n");
            for (ReleaseId dep : dependencies) {
                pom.append("<dependency>\n");
                pom.append(" <groupId>").append(dep.getGroupId()).append("</groupId>\n");
                pom.append(" <artifactId>").append(dep.getArtifactId()).append("</artifactId>\n");
                pom.append(" <version>").append(dep.getVersion()).append("</version>\n");
                pom.append("</dependency>\n");
            }
            pom.append("</dependencies>\n");
        }
        pom.append("</project>");
        return pom.toString();
    }

    protected InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, List<String> resources) {
        return createKieJar(ks, releaseId, resources, null);
    }

    /**
     * Builds a kjar from classpath resources plus optional in-memory extras.
     *
     * @param ks the KieServices entry point
     * @param releaseId GAV for the kjar
     * @param resources classpath resource names written under KBase-test
     * @param extraResources path -> content entries written verbatim, may be null
     * @return the built module
     * @throws RuntimeException if the build reports any error messages
     */
    protected InternalKieModule createKieJar(KieServices ks, ReleaseId releaseId, List<String> resources, Map<String, String> extraResources) {
        KieFileSystem kfs = createKieFileSystemWithKProject(ks);
        kfs.writePomXML(getPom(releaseId));

        for (String resource : resources) {
            kfs.write("src/main/resources/KBase-test/" + resource, ResourceFactory.newClassPathResource(resource));
        }
        if (extraResources != null) {
            for (Map.Entry<String, String> entry : extraResources.entrySet()) {
                kfs.write(entry.getKey(), ResourceFactory.newByteArrayResource(entry.getValue().getBytes()));
            }
        }

        kfs.write("src/main/resources/forms/DefaultProcess.ftl", ResourceFactory.newClassPathResource("repo/globals/forms/DefaultProcess.ftl"));
        KieBuilder kieBuilder = ks.newKieBuilder(kfs);
        // Fixed: build once and reuse the results. The original called
        // buildAll() twice, rebuilding the whole module just to iterate the
        // error messages of the first build.
        List<Message> messages = kieBuilder.buildAll().getResults().getMessages();
        if (!messages.isEmpty()) {
            for (Message message : messages) {
                logger.error("Error Message: ({}) {}", message.getPath(), message.getText());
            }
            throw new RuntimeException("There are errors building the package, please check your knowledge assets!");
        }

        return (InternalKieModule) kieBuilder.getKieModule();
    }

    /**
     * Creates a KieFileSystem preloaded with a kmodule.xml declaring the
     * KBase-test base and three sessions (one default, two named).
     */
    protected KieFileSystem createKieFileSystemWithKProject(KieServices ks) {
        KieModuleModel kproj = ks.newKieModuleModel();

        KieBaseModel kieBaseModel1 = kproj.newKieBaseModel("KBase-test").setDefault(true).addPackage("*")
                .setEqualsBehavior(EqualityBehaviorOption.EQUALITY)
                .setEventProcessingMode(EventProcessingOption.STREAM);

        kieBaseModel1.newKieSessionModel("ksession-test").setDefault(true)
                .setType(KieSessionModel.KieSessionType.STATEFUL)
                .setClockType(ClockTypeOption.get("realtime"))
                .newWorkItemHandlerModel("Log", "new org.jbpm.process.instance.impl.demo.SystemOutWorkItemHandler()");

        kieBaseModel1.newKieSessionModel("ksession-test-2").setDefault(false)
                .setType(KieSessionModel.KieSessionType.STATEFUL)
                .setClockType(ClockTypeOption.get("realtime"))
                .newWorkItemHandlerModel("Log", "new org.jbpm.kie.services.test.objects.KieConteinerSystemOutWorkItemHandler(kieContainer)");

        kieBaseModel1.newKieSessionModel("ksession-test2").setDefault(false)
                .setType(KieSessionModel.KieSessionType.STATEFUL)
                .setClockType(ClockTypeOption.get("realtime"));

        KieFileSystem kfs = ks.newKieFileSystem();
        kfs.writeKModuleXML(kproj.toXML());
        return kfs;
    }

    /** Creates the non-XA H2 in-memory datasource used by the services. */
    protected void buildDatasource() {
        ds = new PoolingDataSource();
        ds.setUniqueName("jdbc/testDS1");
        // NON XA CONFIGS
        ds.setClassName("org.h2.jdbcx.JdbcDataSource");
        ds.setMaxPoolSize(3);
        ds.setAllowLocalTransactions(true);
        ds.getDriverProperties().put("user", "sa");
        ds.getDriverProperties().put("password", "sasa");
        ds.getDriverProperties().put("URL", "jdbc:h2:mem:mydb");
        ds.init();
    }

    protected void closeDataSource() {
        if (ds != null) {
            ds.close();
        }
    }

    /** Removes leftover *-jbpmSessionId.ser marker files from the temp dir. */
    public static void cleanupSingletonSessionId() {
        File tempDir = new File(System.getProperty("java.io.tmpdir"));
        if (tempDir.exists()) {
            String[] jbpmSerFiles = tempDir.list(new FilenameFilter() {

                @Override
                public boolean accept(File dir, String name) {
                    return name.endsWith("-jbpmSessionId.ser");
                }
            });
            // Fixed: File.list() returns null on I/O error or when the path is
            // not a directory; the original would NPE in the for-each below.
            if (jbpmSerFiles == null) {
                return;
            }
            for (String file : jbpmSerFiles) {
                logger.debug("Temp dir to be removed {} file {}", tempDir, file);
                new File(tempDir, file).delete();
            }
        }
    }

    public void setDeploymentService(DeploymentService deploymentService) {
        this.deploymentService = deploymentService;
    }

    public void setBpmn2Service(DefinitionService bpmn2Service) {
        this.bpmn2Service = bpmn2Service;
    }

    public void setRuntimeDataService(RuntimeDataService runtimeDataService) {
        this.runtimeDataService = runtimeDataService;
    }

    public void setProcessService(ProcessService processService) {
        this.processService = processService;
    }

    public void setUserTaskService(UserTaskService userTaskService) {
        this.userTaskService = userTaskService;
    }

    /** Blocks on the barrier, failing the test on interruption or breakage. */
    protected static void waitForTheOtherThreads(CyclicBarrier barrier) {
        try {
            barrier.await();
        } catch (InterruptedException e) {
            // Fixed: restore the thread's interrupt status before failing, so
            // callers higher up the stack can still observe the interruption.
            Thread.currentThread().interrupt();
            fail("Thread 1 was interrupted while waiting for the other threads!");
        } catch (BrokenBarrierException e) {
            fail("Thread 1's barrier was broken while waiting for the other threads!");
        }
    }
}
package com.lachesis.mnis.core.order.repository.impl;

import static org.junit.Assert.assertNotNull;

import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;

import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;

import com.google.gson.Gson;
import com.lachesis.mnis.core.SpringTest;
import com.lachesis.mnis.core.order.entity.HisOrderGroup;
import com.lachesis.mnis.core.order.entity.OrderBedInfo;
import com.lachesis.mnis.core.order.entity.OrderCount;
import com.lachesis.mnis.core.order.entity.OrderExecDocumentInfo;
import com.lachesis.mnis.core.order.entity.OrderExecGroup;
import com.lachesis.mnis.core.order.entity.OrderPrintInfo;
import com.lachesis.mnis.core.order.repository.OrderRepository;
import com.lachesis.mnis.core.util.DateUtil;
import com.lachesis.mnis.core.util.StringUtil;

/**
 * Integration tests for {@link OrderRepository}, run against the Spring test
 * context provided by {@link SpringTest}. Most tests only smoke-check that a
 * query executes and serialize the result with Gson for inspection.
 */
public class OrderServiceImplTest extends SpringTest {

    @Autowired
    OrderRepository orderRepository;

    private static final Logger LOGGER = LoggerFactory.getLogger(OrderServiceImplTest.class);

    private String patientId = null;
    private List<String> patientIds = new ArrayList<>();
    private String orderTypeCode = null;
    private Date startDate = null;
    private Date endDate = null;
    private String orderGroupId = null;
    private Date[] dateRegion = new Date[2];
    HashMap<String, Object> params = new HashMap<String, Object>();

    /** Seeds the fixture ids and the [start, end] query window for each test. */
    @Before
    public void init() {
        patientId = "34363864_1";
        patientIds.add("ZA4673929_1");
        startDate = getCurDateWithMinTime();
        endDate = getCurDateWithMaxTime();
        orderGroupId = "ZA1001803*30*1";
        dateRegion = DateUtil.getQueryRegionDates(new Date());
        params.put("deptCode", "5042");
        params.put("queryDate", "2016-02-19");
    }

    @Test
    public void testInit() {
        assertNotNull(orderRepository);
    }

    @Test
    public void testSelectOrderBaseGroupByPatientId() {
        /*List<OrderExecGroup> orderGroupList = orderRepository
                .selectOrderBaseGroupByPatientIds(patientIds, orderTypeCode,
                        null, null, startDate, endDate);
        Gson gson = new Gson();
        String orderGroupString = gson.toJson(orderGroupList);
        LOGGER.debug("testSelectOrderBaseGroupByPatientId : " + orderGroupList.size());
        assertNotNull(orderGroupList);*/
    }

    @Test
    public void testSelectExecutedOrderGroups() {
        List<OrderExecGroup> orderExecGroups = orderRepository
                .selectExecutedOrderGroups(patientId, null, null, startDate, endDate);
        // normalize "no rows" to null so the assertion covers both outcomes
        if (orderExecGroups != null && orderExecGroups.size() == 0)
            orderExecGroups = null;
        Assert.assertNull(orderExecGroups);
    }

    @Test
    public void testSelectOrderGroupDetailByGroupId() {
        OrderExecGroup orderExecGroup = orderRepository
                .getOrderListScanGroupId(orderGroupId, null, null);
        Assert.assertNull(orderExecGroup);
    }

    /*
     * @Test public void testSelectPendingOrderGroupByPatientId() {
     * List<OrderExecGroup> orderExecGroups = orderRepository
     * .selectPendingOrderGroupByPatientId(patientId, startDate, endDate,
     * orderTypeCode); assertNotNull(orderExecGroups); }
     */

    @Test
    public void testInsertSingleOrderExecution() {
        // List<OrderExecGroup> orderExecGroups = orderRepository
        // .selectExecutedOrderGroups(patientId, startDate, endDate);
        // assertNotNull(orderExecGroups);
    }

    @Test
    public void testUpdateOrderExecution() {
        /*
         * List<OrderExecGroup> orderExecGroups = orderRepository
         * .selectExecutedOrderGroups(patientId, startDate, endDate);
         * assertNotNull(orderExecGroups);
         */
    }

    @Test
    public void testGetShiftOrderList() {
        // List<OrderExecGroup> orderExecGroups = orderRepository
        // .getShiftOrderList(patientIds, startDate, endDate);
        // assertNotNull(orderExecGroups);
    }

    @Test
    public void testGetOrderBedInfoList() {
        List<OrderBedInfo> orderBedInfos = orderRepository.getOrderLongBedInfoList(
                patientIds, null, "0", null, startDate, endDate);
        Assert.assertNull(orderBedInfos);
    }

    @Test
    public void testgetOriginalOrderList() {
        List<HisOrderGroup> hisOrderGroups = orderRepository.getOriginalOrderList(
                patientIds, null, null, dateRegion[0], dateRegion[1]);
        Gson gson = new Gson();
        String gsonString = gson.toJson(hisOrderGroups);
        assertNotNull(hisOrderGroups);
    }

    @Test
    public void testGetOrderPrintInfos() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("deptCode", "H250005");
        params.put("orderTypeCode", "1");
        params.put("startDate", startDate);
        params.put("endDate", endDate);
        List<OrderPrintInfo> hisOrderGroups = orderRepository.getOrderPrintInfos(params);
        Gson gson = new Gson();
        String gsonString = gson.toJson(hisOrderGroups);
        assertNotNull(hisOrderGroups);
    }

    @Test
    public void testGetOrderExecDocumentInfos() {
        List<OrderExecDocumentInfo> orderExecDocumentInfos = orderRepository.getOrdExecDocInfos(params);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecDocumentInfos);
        // NOTE(review): hard-coded Windows path; this write fails silently or
        // errors on non-Windows agents — consider java.io.tmpdir instead.
        StringUtil.writeStrToFile(gsonString, "D:/jsonFile/orderExecDocumentInfos.txt");
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testGetOrdExecDocInfosToOral() {
        List<OrderExecDocumentInfo> orderExecDocumentInfos = orderRepository.getOrdExecDocInfosToOral(params);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecDocumentInfos);
        StringUtil.writeStrToFile(gsonString, "D:/jsonFile/getOrdExecDocInfosToOral.txt");
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testGetOrdExecDocInfosToLab() {
        HashMap<String, Object> labParams = new HashMap<String, Object>();
        labParams.put("deptCode", "5042");
        labParams.put("queryDate", "2015-12-25");
        List<OrderExecDocumentInfo> orderExecDocumentInfos = orderRepository.getOrdExecDocInfosToLab(labParams);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecDocumentInfos);
        StringUtil.writeStrToFile(gsonString, "D:/jsonFile/getOrdExecDocInfosToLab.txt");
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testGetOrdExecDocInfosOnInfuCard() {
        List<OrderExecDocumentInfo> orderExecDocumentInfos = orderRepository.getOrdExecDocInfosOnInfuCard(params);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecDocumentInfos);
        StringUtil.writeStrToFile(gsonString, "D:/jsonFile/getOrdExecDocInfosOnInfuCard.txt");
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testGetOrdExecDocInfosOnLabel() {
        List<OrderExecDocumentInfo> orderExecDocumentInfos = orderRepository.getOrdExecDocInfosOnLabel(params);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecDocumentInfos);
        StringUtil.writeStrToFile(gsonString, "D:/jsonFile/getOrdExecDocInfosOnLabel.txt");
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testselectOrderBaseGroupByPatId() {
        List<OrderExecGroup> orders = orderRepository
                .selectOrderBaseGroupByPatId("ZY020000137670", null, "ORAL", null,
                        DateUtil.parse("2016-02-19"), DateUtil.parse("2016-02-20"));
        Assert.assertNotNull(orders);
    }

    @Test
    public void testGetOrderBarcodeType() {
        String ordType = orderRepository.getOrderBarcodeType("13", null);
        // Fixed: the original used Assert.assertSame, which compares object
        // identity; a String materialized by the repository is a distinct
        // instance from the "NORMAL" literal, so that assertion could fail
        // even when the value is correct. Compare by value instead.
        Assert.assertEquals("NORMAL", ordType);
    }

    @Test
    public void testGetOrdExecDocInfosToNda() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("deptCode", "5042");
        params.put("startDate", "2016-05-01");
        params.put("endDate", "2016-05-03");
        List<OrderExecGroup> orderExecGroups = orderRepository.getOrdExecDocInfosToNda(params);
        Assert.assertNotNull(orderExecGroups);
    }

    @Test
    public void testGetOrdExecDocInfosToNdaOral() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("deptCode", "5042");
        params.put("startDate", "2016-05-01");
        params.put("endDate", "2016-05-03");
        List<OrderExecGroup> orderExecGroups = orderRepository.getOrdExecDocInfosToNdaOral(params);
        Assert.assertNotNull(orderExecGroups);
    }

    @Test
    public void testGetOrdExecDocInfosToNdaLab() {
        HashMap<String, Object> params = new HashMap<String, Object>();
        params.put("deptCode", "5042");
        params.put("startDate", "2016-05-01");
        params.put("endDate", "2016-05-03");
        List<OrderExecGroup> orderExecGroups = orderRepository.getOrdExecDocInfosToNdaLab(params);
        Assert.assertNotNull(orderExecGroups);
    }

    @Test
    public void testGetBloodOrderGroupDetailByBarcode() {
        OrderExecGroup orderExecGroup = orderRepository.getBloodOrderGroupDetailByBarcode("111", "22", null);
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderExecGroup);
        Assert.assertNotNull(gsonString);
    }

    @Test
    public void testGetBloodOrderCount() {
        OrderCount orderCount = orderRepository.getBloodOrderCount("111", new Date());
        Gson gson = new Gson();
        String gsonString = gson.toJson(orderCount);
        Assert.assertNotNull(gsonString);
    }
}
// Copyright 2008 Google Inc. All Rights Reserved. package org.clearsilver.jni; import org.clearsilver.CSFileLoader; import org.clearsilver.HDF; import java.io.IOException; import java.util.Calendar; import java.util.Date; import java.util.TimeZone; /** * This class is a wrapper around the HDF C API. Many features of the C API * are not yet exposed through this wrapper. * * @author Sergio Marti (smarti@google.com) */ public class JniHdf implements HDF { long hdfptr; // stores the C HDF* pointer JniHdf root; // If this is a child HDF node, points at the root node of // the tree. For root nodes this is null. A child node needs // to hold a reference on the root to prevent the root from // being GC-ed. static { JNI.loadLibrary(); } static JniHdf cast(HDF hdf) { if (!(hdf instanceof JniHdf)) { throw new IllegalArgumentException("HDF object not of type JniHdf. " + "Make sure you use the same ClearsilverFactory to construct all " + "related HDF and CS objects."); } return (JniHdf)hdf; } /** * Default public constructor. */ public JniHdf() { hdfptr = _init(); root = null; } protected JniHdf(long hdfptr, JniHdf parent) { this.hdfptr = hdfptr; this.root = (parent.root != null) ? parent.root : parent; } /** Constructs an HDF child node. Used by other methods in this class when * a child node needs to be constructed. */ protected JniHdf newHdf(long hdfptr, HDF parent) { return new JniHdf(hdfptr, cast(parent)); } /** Clean up allocated memory if neccesary. close() allows application * to force clean up. */ public void close() { // Only root nodes have ownership of the C HDF pointer, so only a root // node needs to dealloc hdfptr.dir if (root == null) { if (hdfptr != 0) { _dealloc(hdfptr); hdfptr = 0; } } } /** Call close() just in case when deallocating Java object. */ protected void finalize() throws Throwable { close(); super.finalize(); } /** Loads the contents of the specified HDF file from disk into the current * HDF object. 
The loaded contents are merged with the existing contents. * @param filename the name of file to read in and parse. * @throws java.io.FileNotFoundException if the specified file does not * exist. * @throws IOException other problems reading the file. */ public boolean readFile(String filename) throws IOException { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _readFile(hdfptr, filename, fileLoader != null); } protected String fileLoad(String filename) throws IOException { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } CSFileLoader aFileLoader = fileLoader; if (aFileLoader == null) { throw new NullPointerException("No fileLoader specified."); } else { String result = aFileLoader.load(this, filename); if (result == null) { throw new NullPointerException("CSFileLoader.load() returned null"); } return result; } } // The optional CS file loader to use to read in files private CSFileLoader fileLoader = null; /** * Get the file loader in use, if any. * @return the file loader in use. */ public CSFileLoader getFileLoader() { return fileLoader; } /** * Set the CS file loader to use * @param fileLoader the file loader that should be used. */ public void setFileLoader(CSFileLoader fileLoader) { this.fileLoader = fileLoader; } /** Serializes HDF contents to a file (readable by readFile) */ public boolean writeFile(String filename) throws IOException { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _writeFile(hdfptr, filename); } /** Parses/loads the contents of the given string as HDF into the current * HDF object. The loaded contents are merged with the existing contents. 
*/ public boolean readString(String data) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _readString(hdfptr, data); } /** Serializes HDF contents to a string (readable by readString) */ public String writeString() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _writeString(hdfptr); } /** Retrieves the integer value at the specified path in this HDF node's * subtree. If the value does not exist, or cannot be converted to an * integer, default_value will be returned. */ public int getIntValue(String hdfname, int default_value) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _getIntValue(hdfptr,hdfname,default_value); } /** Retrieves the value at the specified path in this HDF node's subtree. */ public String getValue(String hdfname, String default_value) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _getValue(hdfptr,hdfname,default_value); } /** Sets the value at the specified path in this HDF node's subtree. */ public void setValue(String hdfname, String value) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } _setValue(hdfptr,hdfname,value); } /** Remove the specified subtree. */ public void removeTree(String hdfname) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } _removeTree(hdfptr,hdfname); } /** Links the src hdf name to the dest. 
*/ public void setSymLink(String hdf_name_src, String hdf_name_dest) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } _setSymLink(hdfptr,hdf_name_src,hdf_name_dest); } /** Export a date to a clearsilver tree using a specified timezone */ public void exportDate(String hdfname, TimeZone timeZone, Date date) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } Calendar cal = Calendar.getInstance(timeZone); cal.setTime(date); String sec = Integer.toString(cal.get(Calendar.SECOND)); setValue(hdfname + ".sec", sec.length() == 1 ? "0" + sec : sec); String min = Integer.toString(cal.get(Calendar.MINUTE)); setValue(hdfname + ".min", min.length() == 1 ? "0" + min : min); setValue(hdfname + ".24hour", Integer.toString(cal.get(Calendar.HOUR_OF_DAY))); // java.util.Calendar uses represents 12 o'clock as 0 setValue(hdfname + ".hour", Integer.toString( cal.get(Calendar.HOUR) == 0 ? 12 : cal.get(Calendar.HOUR))); setValue(hdfname + ".am", cal.get(Calendar.AM_PM) == Calendar.AM ? "1" : "0"); setValue(hdfname + ".mday", Integer.toString(cal.get(Calendar.DAY_OF_MONTH))); setValue(hdfname + ".mon", Integer.toString(cal.get(Calendar.MONTH)+1)); setValue(hdfname + ".year", Integer.toString(cal.get(Calendar.YEAR))); setValue(hdfname + ".2yr", Integer.toString(cal.get(Calendar.YEAR)).substring(2)); // Java DAY_OF_WEEK puts Sunday .. Saturday as 1 .. 7 respectively // See http://java.sun.com/j2se/1.5.0/docs/api/java/util/Calendar.html#DAY_OF_WEEK // However, C and Python export Sun .. Sat as 0 .. 6, because // POSIX localtime_r produces wday 0 .. 6. So, adjust. setValue(hdfname + ".wday", Integer.toString(cal.get(Calendar.DAY_OF_WEEK) - 1)); boolean tzNegative = timeZone.getRawOffset() < 0; int tzAbsolute = java.lang.Math.abs(timeZone.getRawOffset()/1000); String tzHour = Integer.toString(tzAbsolute/3600); String tzMin = Integer.toString(tzAbsolute/60 - (tzAbsolute/3600)*60); String tzString = (tzNegative ? "-" : "+") + (tzHour.length() == 1 ? 
"0" + tzHour : tzHour) + (tzMin.length() == 1 ? "0" + tzMin : tzMin); setValue(hdfname + ".tzoffset", tzString); } /** Export a date to a clearsilver tree using a specified timezone */ public void exportDate(String hdfname, String tz, int tt) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } TimeZone timeZone = TimeZone.getTimeZone(tz); if (timeZone == null) { throw new RuntimeException("Unknown timezone: " + tz); } Date date = new Date((long)tt * 1000); exportDate(hdfname, timeZone, date); } /** Retrieves the HDF object that is the root of the subtree at hdfpath, or * null if no object exists at that path. */ public JniHdf getObj(String hdfpath) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } long obj_ptr = _getObj(hdfptr, hdfpath); if ( obj_ptr == 0 ) { return null; } return newHdf(obj_ptr, this); } /** Retrieves the HDF for the first child of the root of the subtree * at hdfpath, or null if no child exists of that path or if the * path doesn't exist. */ public JniHdf getChild(String hdfpath) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } long obj_ptr = _getChild(hdfptr, hdfpath); if ( obj_ptr == 0 ) { return null; } return newHdf(obj_ptr, this); } /** Return the root of the tree where the current node lies. If the * current node is the root, return this. */ public JniHdf getRootObj() { return root != null ? 
root : this; } public boolean belongsToSameRoot(HDF hdf) { JniHdf jniHdf = cast(hdf); return this.getRootObj() == jniHdf.getRootObj(); } /** Retrieves the HDF object that is the root of the subtree at * hdfpath, create the subtree if it doesn't exist */ public JniHdf getOrCreateObj(String hdfpath) { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } long obj_ptr = _getObj(hdfptr, hdfpath); if ( obj_ptr == 0 ) { // Create a node _setValue(hdfptr, hdfpath, ""); obj_ptr = _getObj( hdfptr, hdfpath ); if ( obj_ptr == 0 ) { return null; } } return newHdf(obj_ptr, this); } /** Returns the name of this HDF node. The root node has no name, so * calling this on the root node will return null. */ public String objName() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _objName(hdfptr); } /** Returns the value of this HDF node, or null if this node has no value. * Every node in the tree can have a value, a child, and a next peer. */ public String objValue() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _objValue(hdfptr); } /** Returns the child of this HDF node, or null if there is no child. * Use this in conjunction with objNext to walk the HDF tree. Every node * in the tree can have a value, a child, and a next peer. */ public JniHdf objChild() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } long child_ptr = _objChild(hdfptr); if ( child_ptr == 0 ) { return null; } return newHdf(child_ptr, this); } /** Returns the next sibling of this HDF node, or null if there is no next * sibling. Use this in conjunction with objChild to walk the HDF tree. * Every node in the tree can have a value, a child, and a next peer. 
*/ public JniHdf objNext() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } long next_ptr = _objNext(hdfptr); if ( next_ptr == 0 ) { return null; } return newHdf(next_ptr, this); } public void copy(String hdfpath, HDF src) { JniHdf source = cast(src); if (hdfptr == 0 || source.hdfptr == 0) { throw new NullPointerException("HDF is closed."); } _copy(hdfptr, hdfpath, source.hdfptr); } /** * Generates a string representing the content of the HDF tree rooted at * this node. */ public String dump() { if (hdfptr == 0) { throw new NullPointerException("HDF is closed."); } return _dump(hdfptr); } private static native long _init(); private static native void _dealloc(long ptr); private native boolean _readFile(long ptr, String filename, boolean use_cb) throws IOException; private static native boolean _writeFile(long ptr, String filename); private static native boolean _readString(long ptr, String data); private static native String _writeString(long ptr); private static native int _getIntValue(long ptr, String hdfname, int default_value); private static native String _getValue(long ptr, String hdfname, String default_value); private static native void _setValue(long ptr, String hdfname, String hdf_value); private static native void _removeTree(long ptr, String hdfname); private static native void _setSymLink(long ptr, String hdf_name_src, String hdf_name_dest); private static native long _getObj(long ptr, String hdfpath); private static native long _getChild(long ptr, String hdfpath); private static native long _objChild(long ptr); private static native long _objNext(long ptr); private static native String _objName(long ptr); private static native String _objValue(long ptr); private static native void _copy(long destptr, String hdfpath, long srcptr); private static native String _dump(long ptr); }
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.testsuite.adapter.example.authorization;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.keycloak.common.Profile.Feature.UPLOAD_SCRIPTS;
import static org.keycloak.testsuite.util.WaitUtils.waitForPageToLoad;
import static org.keycloak.testsuite.utils.io.IOUtil.loadRealm;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;

import org.jboss.arquillian.container.test.api.Deployer;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.BeforeClass;
import org.junit.Test;
import org.keycloak.admin.client.resource.AuthorizationResource;
import org.keycloak.admin.client.resource.ClientResource;
import org.keycloak.admin.client.resource.ClientsResource;
import org.keycloak.admin.client.resource.ResourcePermissionsResource;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.representations.idm.authorization.ResourcePermissionRepresentation;
import org.keycloak.representations.idm.authorization.ResourceRepresentation;
import org.keycloak.testsuite.ProfileAssume;
import org.keycloak.testsuite.adapter.AbstractExampleAdapterTest;
import org.keycloak.testsuite.arquillian.annotation.AppServerContainer;
import org.keycloak.testsuite.arquillian.annotation.EnableFeature;
import org.keycloak.testsuite.utils.arquillian.ContainerConstants;
import org.keycloak.testsuite.util.UIUtils;
import org.openqa.selenium.By;

/**
 * Adapter tests exercising the servlet policy enforcer's URI-pattern matching.
 * Each test drives a browser against the deployed "servlet-policy-enforcer"
 * app, flips a permission between the Default and Deny policies via the admin
 * client, and asserts whether access is granted or denied.
 *
 * @author <a href="mailto:psilva@redhat.com">Pedro Igor</a>
 */
@AppServerContainer(ContainerConstants.APP_SERVER_WILDFLY)
@AppServerContainer(ContainerConstants.APP_SERVER_WILDFLY_DEPRECATED)
@AppServerContainer(ContainerConstants.APP_SERVER_EAP)
@AppServerContainer(ContainerConstants.APP_SERVER_EAP6)
@AppServerContainer(ContainerConstants.APP_SERVER_UNDERTOW)
@AppServerContainer(ContainerConstants.APP_SERVER_EAP71)
@AppServerContainer(ContainerConstants.APP_SERVER_TOMCAT7)
@AppServerContainer(ContainerConstants.APP_SERVER_TOMCAT8)
@AppServerContainer(ContainerConstants.APP_SERVER_TOMCAT9)
@EnableFeature(value = UPLOAD_SCRIPTS, skipRestart = true)
public class ServletPolicyEnforcerTest extends AbstractExampleAdapterTest {

    protected static final String REALM_NAME = "servlet-policy-enforcer-authz";
    protected static final String RESOURCE_SERVER_ID = "servlet-policy-enforcer";

    // Deploys/undeploys the resource server around each test (see performTests).
    @ArquillianResource
    private Deployer deployer;

    @Override
    public void addAdapterTestRealms(List<RealmRepresentation> testRealms) {
        testRealms.add(
                loadRealm(new File(TEST_APPS_HOME_DIR + "/servlet-policy-enforcer/servlet-policy-enforcer-authz-realm.json")));
    }

    @Deployment(name = RESOURCE_SERVER_ID, managed = false)
    public static WebArchive deployment() throws IOException {
        return exampleDeployment(RESOURCE_SERVER_ID);
    }

    /** Static two-segment path pattern: /resource/a/b. */
    @Test
    public void testPattern1() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource/a/b");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 1 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource/a/b");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 1 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource/a/b");
            assertFalse(wasDenied());
        });
    }

    /** Wildcard first segment: the same pattern must cover /a/resource-a and /b/resource-a. */
    @Test
    public void testPattern2() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/a/resource-a");
            assertFalse(wasDenied());
            navigateTo("/b/resource-a");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 2 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/a/resource-a");
            assertTrue(wasDenied());
            navigateTo("/b/resource-a");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 2 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/b/resource-a");
            assertFalse(wasDenied());
        });
    }

    /** Ensures Pattern 3 toggles independently of the overlapping Pattern 2. */
    @Test
    public void testPattern3() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/a/resource-b");
            assertFalse(wasDenied());
            navigateTo("/b/resource-b");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 3 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/a/resource-b");
            assertTrue(wasDenied());
            navigateTo("/b/resource-b");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 3 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/b/resource-b");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 2 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/b/resource-a");
            assertFalse(wasDenied());

            // Denying Pattern 3 again must not affect Pattern 2's resource.
            updatePermissionPolicies("Pattern 3 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/a/resource-b");
            assertTrue(wasDenied());
            navigateTo("/b/resource-a");
            assertFalse(wasDenied());
        });
    }

    /** Single-segment static path: /resource-c. */
    @Test
    public void testPattern4() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource-c");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 4 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource-c");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 4 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource-c");
            assertFalse(wasDenied());
        });
    }

    /** Two wildcard segments before a fixed suffix (…/resource-d). */
    @Test
    public void testPattern5() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/a/a/resource-d");
            assertFalse(wasDenied());
            navigateTo("/resource/b/resource-d");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 5 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/a/a/resource-d");
            assertTrue(wasDenied());
            navigateTo("/a/b/resource-d");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 5 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/a/b/resource-d");
            assertFalse(wasDenied());
        });
    }

    /** Wildcard last segment under /resource. */
    @Test
    public void testPattern6() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource/a");
            assertFalse(wasDenied());
            navigateTo("/resource/b");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 6 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource/a");
            assertTrue(wasDenied());
            navigateTo("/resource/b");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 6 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource/b");
            assertFalse(wasDenied());
        });
    }

    /** Wildcards on both sides of a fixed middle segment (/resource/*/f/*). */
    @Test
    public void testPattern7() throws Exception {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource/a/f/b");
            assertFalse(wasDenied());
            navigateTo("/resource/c/f/d");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 7 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource/a/f/b");
            assertTrue(wasDenied());
            navigateTo("/resource/c/f/d");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 7 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource/c/f/d");
            assertFalse(wasDenied());
        });
    }

    /** Exact match on /resource itself (no trailing segment). */
    @Test
    public void testPattern8() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 8 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 8 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource");
            assertFalse(wasDenied());
        });
    }

    /** Suffix-style pattern; the literal "*.suffix" URL exercises matching, not globbing. */
    @Test
    public void testPattern9() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/file/*.suffix");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 9 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/file/*.suffix");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 9 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/file/*.suffix");
            assertFalse(wasDenied());
        });
    }

    /** Deep multi-segment pattern with a wildcard tail. */
    @Test
    public void testPattern10() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/resource/a/i/b/c/d/e");
            assertFalse(wasDenied());
            navigateTo("/resource/a/i/b/c/");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 10 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/resource/a/i/b/c/d/e");
            assertTrue(wasDenied());
            navigateTo("/resource/a/i/b/c/d");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 10 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/resource/a/i/b/c/d");
            assertFalse(wasDenied());
        });
    }

    /**
     * Creates a resource instance at runtime and verifies its permission is
     * enforced for that URI only, while parent paths stay protected.
     */
    @Test
    public void testPattern11UsingResourceInstancePermission() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/api/v1/resource-a");
            assertFalse(wasDenied());
            navigateTo("/api/v1/resource-b");
            assertFalse(wasDenied());

            ResourceRepresentation resource = new ResourceRepresentation("/api/v1/resource-c");

            resource.setUri(resource.getName());

            getAuthorizationResource().resources().create(resource);

            createResourcePermission(resource.getName() + " permission", resource.getName(), "Default Policy");

            login("alice", "alice");

            navigateTo(resource.getUri());
            assertFalse(wasDenied());

            updatePermissionPolicies(resource.getName() + " permission", "Deny Policy");
            login("alice", "alice");

            navigateTo(resource.getUri());
            assertTrue(wasDenied());

            updatePermissionPolicies(resource.getName() + " permission", "Default Policy");
            login("alice", "alice");

            navigateTo(resource.getUri());
            assertFalse(wasDenied());

            // Parent/API root paths are not covered by the instance permission.
            navigateTo("/api/v1");
            assertTrue(wasDenied());
            navigateTo("/api/v1/");
            assertTrue(wasDenied());
            navigateTo("/api");
            assertTrue(wasDenied());
            navigateTo("/api/");
            assertTrue(wasDenied());
        });
    }

    /** KEYCLOAK-7148: a /{id}/* type pattern combined with a concrete resource instance. */
    @Test
    public void testPathWithPatternSlashAllAndResourceInstance() {
        performTests(() -> {
            ResourceRepresentation resource = new ResourceRepresentation("Pattern 15 Instance");

            resource.setType("pattern-15");
            resource.setUri("/keycloak-7148/1");
            resource.setOwner("alice");

            getAuthorizationResource().resources().create(resource).close();

            login("alice", "alice");

            navigateTo("/keycloak-7148/1");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a/2");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a/2/sub-b");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 15 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-7148/1");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a/2");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7148/1/sub-a/2/sub-b");
            assertTrue(wasDenied());

            // does not exist
            navigateTo("/keycloak-7148/2");
            assertTrue(wasDenied());
        });
    }

    /**
     * When realm config and keycloak.json disagree on a resource URI, the
     * adapter-local (keycloak.json) URI wins; the realm URI stays denied.
     */
    @Test
    public void testPriorityOfURIForResource() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/realm_uri");
            assertTrue(wasDenied());
            navigateTo("/keycloak_json_uri");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 12 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/realm_uri");
            assertTrue(wasDenied());
            navigateTo("/keycloak_json_uri");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 12 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/realm_uri");
            assertTrue(wasDenied());
            navigateTo("/keycloak_json_uri");
            assertFalse(wasDenied());
        });
    }

    /** KEYCLOAK-6623: a more specific path must shadow a catch-all pattern. */
    @Test
    public void testPathOrderWithAllPaths() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/keycloak-6623");
            assertFalse(wasDenied());
            navigateTo("/keycloak-6623/sub-resource");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 13 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-6623");
            assertTrue(wasDenied());
            navigateTo("/keycloak-6623/sub-resource");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 14 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-6623");
            assertTrue(wasDenied());
            navigateTo("/keycloak-6623/sub-resource/resource");
            assertTrue(wasDenied());
        });
    }

    /** KEYCLOAK-7269: one resource declared with multiple URIs in keycloak.json. */
    @Test
    public void testMultipleUriForResourceJSONConfig() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/keycloak-7269/sub-resource1");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource1/whatever/specialSuffix");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2/w/h/a/t/e/v/e/r");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 16 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-7269/sub-resource1");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7269/sub-resource1/whatever/specialSuffix");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2");
            assertTrue(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2/w/h/a/t/e/v/e/r");
            assertTrue(wasDenied());

            // NOTE(review): unlike every other test, there is no login(...) after the
            // policy is reverted here -- confirm the omission is intentional.
            updatePermissionPolicies("Pattern 16 Permission", "Default Policy");

            navigateTo("/keycloak-7269/sub-resource1");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource1/whatever/specialSuffix");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2");
            assertFalse(wasDenied());
            navigateTo("/keycloak-7269/sub-resource2/w/h/a/t/e/v/e/r");
            assertFalse(wasDenied());
        });
    }

    /** KEYCLOAK-8823: a literal segment ("entities") overloading a template URI. */
    @Test
    public void testOverloadedTemplateUri() {
        performTests(() -> {
            login("alice", "alice");

            navigateTo("/keycloak-8823/resource/v1/subresource/123/entities");
            assertFalse(wasDenied());
            navigateTo("/keycloak-8823/resource/v1/subresource/123/someother");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 17 Entities Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-8823/resource/v1/subresource/123/entities");
            assertTrue(wasDenied());
            navigateTo("/keycloak-8823/resource/v1/subresource/123/someother");
            assertFalse(wasDenied());

            updatePermissionPolicies("Pattern 17 Entities Permission", "Default Policy");
            updatePermissionPolicies("Pattern 17 Permission", "Deny Policy");
            login("alice", "alice");

            navigateTo("/keycloak-8823/resource/v1/subresource/123/entities");
            assertFalse(wasDenied());
            navigateTo("/keycloak-8823/resource/v1/subresource/123/someother");
            assertTrue(wasDenied());

            updatePermissionPolicies("Pattern 17 Entities Permission", "Default Policy");
            updatePermissionPolicies("Pattern 17 Permission", "Default Policy");
            login("alice", "alice");

            navigateTo("/keycloak-8823/resource/v1/subresource/123/entities");
            assertFalse(wasDenied());
            navigateTo("/keycloak-8823/resource/v1/subresource/123/someother");
            assertFalse(wasDenied());
        });
    }

    /** Drives the browser to a path on the deployed resource server. */
    private void navigateTo(String path) {
        this.driver.navigate().to(getResourceServerUrl() + path);
    }

    private void performTests(ExceptionRunnable assertion) {
        performTests(() -> {}, assertion);
    }

    /** Deploys the resource server, runs the assertions, and always undeploys. */
    private void performTests(ExceptionRunnable beforeDeploy, ExceptionRunnable assertion) {
        try {
            beforeDeploy.run();
            deployer.deploy(RESOURCE_SERVER_ID);
            assertion.run();
        } catch (FileNotFoundException cause) {
            throw new RuntimeException("Failed to import authorization settings", cause);
        } catch (Exception cause) {
            throw new RuntimeException("Error while executing tests", cause);
        } finally {
            deployer.undeploy(RESOURCE_SERVER_ID);
        }
    }

    private AuthorizationResource getAuthorizationResource() {
        return getClientResource(RESOURCE_SERVER_ID).authorization();
    }

    /** Looks up the admin-client handle for the given clientId in the test realm. */
    private ClientResource getClientResource(String clientId) {
        ClientsResource clients = this.realmsResouce().realm(REALM_NAME).clients();
        ClientRepresentation resourceServer = clients.findByClientId(clientId).get(0);
        return clients.get(resourceServer.getId());
    }

    private void logOut() {
        navigateTo();
        UIUtils.clickLink(driver.findElement(By.xpath("//a[text() = 'Sign Out']")));
    }

    /** Logs in through the app, signing out first if a session is already active. */
    private void login(String username, String password) {
        try {
            navigateTo();
            if (this.driver.getCurrentUrl().startsWith(getResourceServerUrl().toString())) {
                logOut();
                navigateTo();
            }

            this.loginPage.form().login(username, password);

            navigateTo();

            assertFalse(wasDenied());
        } catch (Exception cause) {
            throw new RuntimeException("Login failed", cause);
        }
    }

    private void navigateTo() {
        this.driver.navigate().to(getResourceServerUrl() + "/");
        waitForPageToLoad();
    }

    /** True when the app rendered its access-denied message for the last request. */
    private boolean wasDenied() {
        return this.driver.getPageSource().contains("You can not access this resource");
    }

    private URL getResourceServerUrl() {
        try {
            return new URL(this.appServerContextRootPage + "/" + RESOURCE_SERVER_ID);
        } catch (MalformedURLException e) {
            throw new RuntimeException("Could not obtain resource server url.", e);
        }
    }

    /**
     * Points the named permission at the given policies.
     * NOTE(review): addPolicy appends to the representation fetched by name --
     * verify this replaces rather than accumulates policies server-side.
     */
    private void updatePermissionPolicies(String permissionName, String... policyNames) {
        ResourcePermissionsResource permissions = getAuthorizationResource().permissions().resource();
        ResourcePermissionRepresentation permission = permissions.findByName(permissionName);

        permission.addPolicy(policyNames);

        permissions.findById(permission.getId()).update(permission);
    }

    /** Creates a resource-based permission tying resourceName to the given policies. */
    private void createResourcePermission(String name, String resourceName, String... policyNames) {
        ResourcePermissionRepresentation permission = new ResourcePermissionRepresentation();

        permission.setName(name);
        permission.addResource(resourceName);
        permission.addPolicy(policyNames);

        getAuthorizationResource().permissions().resource().create(permission);
    }

    private interface ExceptionRunnable {
        void run() throws Exception;
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.blockmanagement; import java.util.*; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.StorageType; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NetworkTopologyWithNodeGroup; import org.apache.hadoop.net.Node; import org.apache.hadoop.net.NodeBase; /** The class is responsible for choosing the desired number of targets * for placing block replicas on environment with node-group layer. * The replica placement strategy is adjusted to: * If the writer is on a datanode, the 1st replica is placed on the local * node (or local node-group), otherwise a random datanode. * The 2nd replica is placed on a datanode that is on a different rack with 1st * replica node. * The 3rd replica is placed on a datanode which is on a different node-group * but the same rack as the second replica node. 
 */
public class BlockPlacementPolicyWithNodeGroup extends BlockPlacementPolicyDefault {

  protected BlockPlacementPolicyWithNodeGroup(Configuration conf,  FSClusterStats stats,
      NetworkTopology clusterMap, DatanodeManager datanodeManager) {
    // NOTE(review): the datanodeManager parameter is unused and the call passes the
    // inherited host2datanodeMap field instead -- confirm this is intentional and
    // that the field is initialized when this constructor runs.
    initialize(conf, stats, clusterMap, host2datanodeMap);
  }

  protected BlockPlacementPolicyWithNodeGroup() {
  }

  @Override
  public void initialize(Configuration conf,  FSClusterStats stats,
          NetworkTopology clusterMap,
          Host2NodesMap host2datanodeMap) {
    super.initialize(conf, stats, clusterMap, host2datanodeMap);
  }

  /** choose local node of localMachine as the target.
   * if localMachine is not available, choose a node on the same nodegroup or
   * rack instead.
   * @return the chosen node
   */
  @Override
  protected DatanodeStorageInfo chooseLocalStorage(Node localMachine,
      Set<Node> excludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes, boolean fallbackToLocalRack)
      throws NotEnoughReplicasException {
    // if no local machine, randomly choose one node
    if (localMachine == null)
      return chooseRandom(NodeBase.ROOT, excludedNodes, 
          blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes);

    // otherwise try local machine first
    if (localMachine instanceof DatanodeDescriptor) {
      DatanodeDescriptor localDataNode = (DatanodeDescriptor)localMachine;
      if (excludedNodes.add(localMachine)) { // was not in the excluded list
        // Walk the remaining storage-type quota; a successful placement on a
        // (shuffled) local storage decrements that type's remaining count.
        for (Iterator<Map.Entry<StorageType, Integer>> iter = storageTypes
            .entrySet().iterator(); iter.hasNext(); ) {
          Map.Entry<StorageType, Integer> entry = iter.next();
          for (DatanodeStorageInfo localStorage : DFSUtil.shuffle(
              localDataNode.getStorageInfos())) {
            StorageType type = entry.getKey();
            if (addIfIsGoodTarget(localStorage, excludedNodes, blocksize,
                maxNodesPerRack, false, results, avoidStaleNodes, type) >= 0) {
              int num = entry.getValue();
              if (num == 1) {
                iter.remove();
              } else {
                entry.setValue(num - 1);
              }
              return localStorage;
            }
          }
        }
      }
    }

    // try a node on local node group
    DatanodeStorageInfo chosenStorage = chooseLocalNodeGroup(
        (NetworkTopologyWithNodeGroup)clusterMap, localMachine, excludedNodes, 
        blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes);
    if (chosenStorage != null) {
      return chosenStorage;
    }

    if (!fallbackToLocalRack) {
      return null;
    }
    // try a node on local rack
    return chooseLocalRack(localMachine, excludedNodes, 
        blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes);
  }

  /** @return the node of the second replica */
  private static DatanodeDescriptor secondNode(Node localMachine,
      List<DatanodeStorageInfo> results) {
    // find the second replica: the first already-chosen node that is not the
    // local machine itself
    for(DatanodeStorageInfo nextStorage : results) {
      DatanodeDescriptor nextNode = nextStorage.getDatanodeDescriptor();
      if (nextNode != localMachine) {
        return nextNode;
      }
    }
    return null;
  }

  @Override
  protected DatanodeStorageInfo chooseLocalRack(Node localMachine,
      Set<Node> excludedNodes, long blocksize, int maxNodesPerRack,
      List<DatanodeStorageInfo> results, boolean avoidStaleNodes,
      EnumMap<StorageType, Integer> storageTypes) throws
      NotEnoughReplicasException {
    // no local machine, so choose a random machine
    if (localMachine == null) {
      return chooseRandom(NodeBase.ROOT, excludedNodes, 
          blocksize, maxNodesPerRack, results, 
          avoidStaleNodes, storageTypes);
    }

    // choose one from the local rack, but off-nodegroup
    // (getFirstHalf strips the node-group suffix from the network location)
    try {
      final String scope = NetworkTopology.getFirstHalf(localMachine.getNetworkLocation());
      return chooseRandom(scope, excludedNodes, blocksize, maxNodesPerRack,
          results, avoidStaleNodes, storageTypes);
    } catch (NotEnoughReplicasException e1) {
      // find the second replica and retry within its rack before giving up
      final DatanodeDescriptor newLocal = secondNode(localMachine, results);
      if (newLocal != null) {
        try {
          return chooseRandom(
              clusterMap.getRack(newLocal.getNetworkLocation()), excludedNodes,
              blocksize, maxNodesPerRack, results, avoidStaleNodes,
              storageTypes);
        } catch(NotEnoughReplicasException e2) {
          //otherwise randomly choose one from the network
          return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize,
              maxNodesPerRack, results, avoidStaleNodes, storageTypes);
        }
      } else {
        //otherwise randomly choose one from the network
        return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize,
            maxNodesPerRack, results, avoidStaleNodes, storageTypes);
      }
    }
  }

  @Override
  protected void chooseRemoteRack(int numOfReplicas,
      DatanodeDescriptor localMachine, Set<Node> excludedNodes,
      long blocksize, int maxReplicasPerRack, List<DatanodeStorageInfo> results,
      boolean avoidStaleNodes, EnumMap<StorageType, Integer> storageTypes)
      throws NotEnoughReplicasException {
    int oldNumOfReplicas = results.size();

    final String rackLocation = NetworkTopology.getFirstHalf(
        localMachine.getNetworkLocation());
    try {
      // randomly choose from remote racks ("~" excludes the local rack scope)
      chooseRandom(numOfReplicas, "~" + rackLocation, excludedNodes, blocksize,
          maxReplicasPerRack, results, avoidStaleNodes, storageTypes);
    } catch (NotEnoughReplicasException e) {
      // fall back to the local rack for whatever replicas are still missing
      chooseRandom(numOfReplicas - (results.size() - oldNumOfReplicas),
          rackLocation, excludedNodes, blocksize,
          maxReplicasPerRack, results, avoidStaleNodes, storageTypes);
    }
  }

  /* choose one node from the nodegroup that <i>localMachine</i> is on.
   * if no such node is available, choose one node from the nodegroup where
   * a second replica is on.
   * if still no such node is available, choose a random node in the cluster.
* @return the chosen node */ private DatanodeStorageInfo chooseLocalNodeGroup( NetworkTopologyWithNodeGroup clusterMap, Node localMachine, Set<Node> excludedNodes, long blocksize, int maxNodesPerRack, List<DatanodeStorageInfo> results, boolean avoidStaleNodes, EnumMap<StorageType, Integer> storageTypes) throws NotEnoughReplicasException { // no local machine, so choose a random machine if (localMachine == null) { return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes); } // choose one from the local node group try { return chooseRandom( clusterMap.getNodeGroup(localMachine.getNetworkLocation()), excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes); } catch (NotEnoughReplicasException e1) { final DatanodeDescriptor newLocal = secondNode(localMachine, results); if (newLocal != null) { try { return chooseRandom( clusterMap.getNodeGroup(newLocal.getNetworkLocation()), excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes); } catch(NotEnoughReplicasException e2) { //otherwise randomly choose one from the network return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes); } } else { //otherwise randomly choose one from the network return chooseRandom(NodeBase.ROOT, excludedNodes, blocksize, maxNodesPerRack, results, avoidStaleNodes, storageTypes); } } } @Override protected String getRack(final DatanodeInfo cur) { String nodeGroupString = cur.getNetworkLocation(); return NetworkTopology.getFirstHalf(nodeGroupString); } /** * Find other nodes in the same nodegroup of <i>localMachine</i> and add them * into <i>excludeNodes</i> as replica should not be duplicated for nodes * within the same nodegroup * @return number of new excluded nodes */ @Override protected int addToExcludedNodes(DatanodeDescriptor chosenNode, Set<Node> excludedNodes) { int countOfExcludedNodes = 0; String nodeGroupScope = 
chosenNode.getNetworkLocation(); List<Node> leafNodes = clusterMap.getLeaves(nodeGroupScope); for (Node leafNode : leafNodes) { if (excludedNodes.add(leafNode)) { // not a existing node in excludedNodes countOfExcludedNodes++; } } countOfExcludedNodes += addDependentNodesToExcludedNodes( chosenNode, excludedNodes); return countOfExcludedNodes; } /** * Add all nodes from a dependent nodes list to excludedNodes. * @return number of new excluded nodes */ private int addDependentNodesToExcludedNodes(DatanodeDescriptor chosenNode, Set<Node> excludedNodes) { if (this.host2datanodeMap == null) { return 0; } int countOfExcludedNodes = 0; for(String hostname : chosenNode.getDependentHostNames()) { DatanodeDescriptor node = this.host2datanodeMap.getDataNodeByHostName(hostname); if(node!=null) { if (excludedNodes.add(node)) { countOfExcludedNodes++; } } else { LOG.warn("Not able to find datanode " + hostname + " which has dependency with datanode " + chosenNode.getHostName()); } } return countOfExcludedNodes; } /** * Pick up replica node set for deleting replica as over-replicated. * First set contains replica nodes on rack with more than one * replica while second set contains remaining replica nodes. * If first is not empty, divide first set into two subsets: * moreThanOne contains nodes on nodegroup with more than one replica * exactlyOne contains the remaining nodes in first set * then pickup priSet if not empty. * If first is empty, then pick second. */ @Override public Collection<DatanodeStorageInfo> pickupReplicaSet( Collection<DatanodeStorageInfo> first, Collection<DatanodeStorageInfo> second) { // If no replica within same rack, return directly. 
if (first.isEmpty()) { return second; } // Split data nodes in the first set into two sets, // moreThanOne contains nodes on nodegroup with more than one replica // exactlyOne contains the remaining nodes Map<String, List<DatanodeStorageInfo>> nodeGroupMap = new HashMap<String, List<DatanodeStorageInfo>>(); for(DatanodeStorageInfo storage : first) { final String nodeGroupName = NetworkTopology.getLastHalf( storage.getDatanodeDescriptor().getNetworkLocation()); List<DatanodeStorageInfo> storageList = nodeGroupMap.get(nodeGroupName); if (storageList == null) { storageList = new ArrayList<DatanodeStorageInfo>(); nodeGroupMap.put(nodeGroupName, storageList); } storageList.add(storage); } final List<DatanodeStorageInfo> moreThanOne = new ArrayList<DatanodeStorageInfo>(); final List<DatanodeStorageInfo> exactlyOne = new ArrayList<DatanodeStorageInfo>(); // split nodes into two sets for(List<DatanodeStorageInfo> datanodeList : nodeGroupMap.values()) { if (datanodeList.size() == 1 ) { // exactlyOne contains nodes on nodegroup with exactly one replica exactlyOne.add(datanodeList.get(0)); } else { // moreThanOne contains nodes on nodegroup with more than one replica moreThanOne.addAll(datanodeList); } } return moreThanOne.isEmpty()? exactlyOne : moreThanOne; } }
/**
 * <copyright>
 * Copyright (c) 2005 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 * </copyright>
 *
 * $Id: UiextensionmodelSwitch.java,v 1.2 2007/08/09 02:02:15 mchmielewski Exp $
 */
package org.eclipse.bpel.ui.uiextensionmodel.util;

import java.util.List;

import org.eclipse.bpel.ui.uiextensionmodel.*;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;

// NOTE(review): this is an EMF-generated class (see the @generated tags).
// Hand edits will be lost on the next regeneration unless the corresponding
// @generated tag is changed to "@generated NOT".

/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see org.eclipse.bpel.ui.uiextensionmodel.UiextensionmodelPackage
 * @generated
 */
public class UiextensionmodelSwitch<T> {
	/**
	 * The cached model package
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static UiextensionmodelPackage modelPackage;

	/**
	 * Creates an instance of the switch.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public UiextensionmodelSwitch() {
		if (modelPackage == null) {
			modelPackage = UiextensionmodelPackage.eINSTANCE;
		}
	}

	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	public T doSwitch(EObject theEObject) {
		return doSwitch(theEObject.eClass(), theEObject);
	}

	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	protected T doSwitch(EClass theEClass, EObject theEObject) {
		// Classes from this package dispatch by classifier id; foreign classes
		// walk up the first supertype chain until a match or the default case.
		if (theEClass.eContainer() == modelPackage) {
			return doSwitch(theEClass.getClassifierID(), theEObject);
		}
		else {
			List<EClass> eSuperTypes = theEClass.getESuperTypes();
			return
				eSuperTypes.isEmpty() ?
					defaultCase(theEObject) :
					doSwitch(eSuperTypes.get(0), theEObject);
		}
	}

	/**
	 * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the first non-null result returned by a <code>caseXXX</code> call.
	 * @generated
	 */
	protected T doSwitch(int classifierID, EObject theEObject) {
		switch (classifierID) {
			case UiextensionmodelPackage.ACTIVITY_EXTENSION: {
				ActivityExtension activityExtension = (ActivityExtension)theEObject;
				T result = caseActivityExtension(activityExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.CASE_EXTENSION: {
				CaseExtension caseExtension = (CaseExtension)theEObject;
				T result = caseCaseExtension(caseExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.COPY_EXTENSION: {
				CopyExtension copyExtension = (CopyExtension)theEObject;
				T result = caseCopyExtension(copyExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.END_NODE: {
				EndNode endNode = (EndNode)theEObject;
				T result = caseEndNode(endNode);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.LINK_EXTENSION: {
				LinkExtension linkExtension = (LinkExtension)theEObject;
				T result = caseLinkExtension(linkExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.ON_ALARM_EXTENSION: {
				OnAlarmExtension onAlarmExtension = (OnAlarmExtension)theEObject;
				T result = caseOnAlarmExtension(onAlarmExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.ON_EVENT_EXTENSION: {
				OnEventExtension onEventExtension = (OnEventExtension)theEObject;
				T result = caseOnEventExtension(onEventExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.ON_MESSAGE_EXTENSION: {
				OnMessageExtension onMessageExtension = (OnMessageExtension)theEObject;
				T result = caseOnMessageExtension(onMessageExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.PARTNER_LINK_EXTENSION: {
				PartnerLinkExtension partnerLinkExtension = (PartnerLinkExtension)theEObject;
				T result = casePartnerLinkExtension(partnerLinkExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.PROCESS_EXTENSION: {
				ProcessExtension processExtension = (ProcessExtension)theEObject;
				T result = caseProcessExtension(processExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.REFERENCE_PARTNER_LINKS: {
				ReferencePartnerLinks referencePartnerLinks = (ReferencePartnerLinks)theEObject;
				T result = caseReferencePartnerLinks(referencePartnerLinks);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.START_NODE: {
				StartNode startNode = (StartNode)theEObject;
				T result = caseStartNode(startNode);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			case UiextensionmodelPackage.VARIABLE_EXTENSION: {
				VariableExtension variableExtension = (VariableExtension)theEObject;
				T result = caseVariableExtension(variableExtension);
				if (result == null) result = defaultCase(theEObject);
				return result;
			}
			default: return defaultCase(theEObject);
		}
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Activity Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Activity Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseActivityExtension(ActivityExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Case Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Case Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseCaseExtension(CaseExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Copy Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Copy Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseCopyExtension(CopyExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>End Node</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>End Node</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseEndNode(EndNode object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Link Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Link Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseLinkExtension(LinkExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>On Alarm Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>On Alarm Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseOnAlarmExtension(OnAlarmExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>On Event Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>On Event Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseOnEventExtension(OnEventExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>On Message Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>On Message Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseOnMessageExtension(OnMessageExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Partner Link Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Partner Link Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T casePartnerLinkExtension(PartnerLinkExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Process Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Process Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseProcessExtension(ProcessExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Reference Partner Links</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Reference Partner Links</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseReferencePartnerLinks(ReferencePartnerLinks object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Start Node</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Start Node</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseStartNode(StartNode object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>Variable Extension</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>Variable Extension</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
	 * @generated
	 */
	public T caseVariableExtension(VariableExtension object) {
		return null;
	}

	/**
	 * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * <!-- begin-user-doc -->
	 * This implementation returns null;
	 * returning a non-null result will terminate the switch, but this is the last case anyway.
	 * <!-- end-user-doc -->
	 * @param object the target of the switch.
	 * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
	 * @see #doSwitch(org.eclipse.emf.ecore.EObject)
	 * @generated
	 */
	public T defaultCase(EObject object) {
		return null;
	}

} //UiextensionmodelSwitch
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.json.simple.JSONObject; import org.junit.Ignore; import org.junit.Test; /** * Tests for commit with emphasis on move operations. 
*/ @Ignore public class DocumentMKCommitMoveTest extends BaseDocumentMKTest { @Test public void moveNode() throws Exception { mk.commit("/", "+\"a\" : {}", null, null); assertTrue(mk.nodeExists("/a", null)); mk.commit("/", ">\"a\" : \"b\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/b", null)); } @Test public void moveUnderSourcePath() throws Exception { mk.commit("/", "+\"a\" : { \"b\" : {} }", null, null); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/a/b", null)); try { mk.commit("/", ">\"b\" : \"a\"", null, null); fail("Exception expected"); } catch (Exception expected) { // expected } } @Test public void moveNodeWithChild() throws Exception { mk.commit("/", "+\"a\" : { \"b\" : {} }", null, null); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/a/b", null)); mk.commit("/", ">\"a\" : \"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/c", null)); assertTrue(mk.nodeExists("/c/b", null)); } @Test public void moveNodeWithChildren() throws Exception { mk.commit("/", "+\"a\" : { \"b\" : {}, \"c\" : {}, \"d\" : {}}", null, null); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/a/c", null)); assertTrue(mk.nodeExists("/a/d", null)); mk.commit("/", ">\"a\" : \"e\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertFalse(mk.nodeExists("/a/c", null)); assertFalse(mk.nodeExists("/a/d", null)); assertTrue(mk.nodeExists("/e", null)); assertTrue(mk.nodeExists("/e/b", null)); assertTrue(mk.nodeExists("/e/c", null)); assertTrue(mk.nodeExists("/e/d", null)); } @Test public void moveNodeWithNestedChildren() throws Exception { mk.commit("/", "+\"a\" : { \"b\" : { \"c\" : { \"d\" : {} } } }", null, null); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/a/b/c", 
null)); assertTrue(mk.nodeExists("/a/b/c/d", null)); mk.commit("/", ">\"a\" : \"e\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertFalse(mk.nodeExists("/a/b/c", null)); assertFalse(mk.nodeExists("/a/b/c/d", null)); assertTrue(mk.nodeExists("/e", null)); assertTrue(mk.nodeExists("/e/b", null)); assertTrue(mk.nodeExists("/e/b/c", null)); assertTrue(mk.nodeExists("/e/b/c/d", null)); mk.commit("/", ">\"e/b\" : \"f\"", null, null); assertTrue(mk.nodeExists("/e", null)); assertFalse(mk.nodeExists("/e/b", null)); assertFalse(mk.nodeExists("/e/b/c", null)); assertFalse(mk.nodeExists("/e/b/c/d", null)); assertTrue(mk.nodeExists("/f", null)); assertTrue(mk.nodeExists("/f/c", null)); assertTrue(mk.nodeExists("/f/c/d", null)); } @Test public void moveNodeWithProperties() throws Exception { mk.commit("/", "+\"a\" : { \"key1\" : \"value1\" }", null, null); assertTrue(mk.nodeExists("/a", null)); String nodes = mk.getNodes("/a", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyValue(obj, "key1", "value1"); mk.commit("/", ">\"a\" : \"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/c", null)); nodes = mk.getNodes("/c", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); obj = parseJSONObject(nodes); assertPropertyValue(obj, "key1", "value1"); } @Test public void moveFromNonExistentNode() throws Exception { try { mk.commit("/", ">\"b\" : \"c\"", null, null); fail("Exception expected"); } catch (Exception expected) { // expected } } @Test public void moveToAnExistentNode() throws Exception { mk.commit("/", "+\"a\" : { \"b\" : {} }", null, null); mk.commit("/", "+\"c\" : {}", null, null); try { mk.commit("/", ">\"c\" : \"a/b\"", null, null); fail("Exception expected"); } catch (Exception expected) { // expected } } @Test public void addNodeAndMove() { mk.commit("/", "+\"a\":{}", null, null); 
mk.commit("/", "+\"a/b\": {}\n" + ">\"a/b\":\"c\"", null, null); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/c", null)); } @Test public void addNodeAndMove2() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", "+\"a/b\": {}\n", null, null); mk.commit("/", ">\"a/b\":\"c\"", null, null); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/c", null)); } @Test public void addNodeWithChildrenAndMove() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", "+\"a/b\":{ \"c\" : {}, \"d\" : {} }\n" + ">\"a/b\":\"e\"", null, null); assertTrue(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertFalse(mk.nodeExists("/a/b/c", null)); assertFalse(mk.nodeExists("/a/b/d", null)); assertTrue(mk.nodeExists("/e", null)); assertTrue(mk.nodeExists("/e/c", null)); assertTrue(mk.nodeExists("/e/d", null)); } @Test public void addNodeWithNestedChildrenAndMove() { mk.commit("/", "+\"a\":{ \"b\" : { \"c\" : { } } }", null, null); mk.commit("/", "+\"a/b/c/d\":{}\n" + ">\"a\":\"e\"", null, null); assertFalse(mk.nodeExists("/a/b/c/d", null)); assertTrue(mk.nodeExists("/e/b/c/d", null)); } @Test public void addNodeAndMoveParent() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", "+\"a/b\":{}\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/c", null)); assertTrue(mk.nodeExists("/c/b", null)); } @Test public void removeNodeAndMove() { mk.commit("/", "+\"a\":{ \"b\" : {} }", null, null); try { mk.commit("/", "-\"a/b\"\n" + ">\"a/b\":\"c\"", null, null); fail("Expected expected"); } catch (Exception expected) { // expected } } @Test public void removeNodeWithNestedChildrenAndMove() { mk.commit("/", "+\"a\":{ \"b\" : { \"c\" : { \"d\" : {} } } }", null, null); mk.commit("/", "-\"a/b/c/d\"\n" + ">\"a\" : \"e\"", null, null); 
assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/e/b/c", null)); assertFalse(mk.nodeExists("/e/b/c/d", null)); } @Test public void removeNodeAndMoveParent() { mk.commit("/", "+\"a\":{ \"b\" : {} }", null, null); mk.commit("/", "-\"a/b\"\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/c", null)); assertFalse(mk.nodeExists("/c/b", null)); } @Test public void setPropertyAndMove() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", "^\"a/key1\": \"value1\"\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/c", null)); String nodes = mk.getNodes("/c", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyValue(obj, "key1", "value1"); } @Test public void setNestedPropertyAndMove() { mk.commit("/", "+\"a\":{ \"b\" : {} }", null, null); mk.commit("/", "^\"a/b/key1\": \"value1\"\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/c", null)); assertTrue(mk.nodeExists("/c/b", null)); String nodes = mk.getNodes("/c/b", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyValue(obj, "key1", "value1"); } @Test public void modifyParentAddPropertyAndMove() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", "+\"b\" : {}\n" + "^\"a/key1\": \"value1\"\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/b", null)); assertTrue(mk.nodeExists("/c", null)); String nodes = mk.getNodes("/c", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyValue(obj, "key1", "value1"); } @Test public void removePropertyAndMove() { mk.commit("/", "+\"a\":{ \"b\" : { \"key1\" : \"value1\" } }", null, null); 
mk.commit("/", "^\"a/b/key1\": null\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertFalse(mk.nodeExists("/a/b", null)); assertTrue(mk.nodeExists("/c", null)); assertTrue(mk.nodeExists("/c/b", null)); String nodes = mk.getNodes("/c/b", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyNotExists(obj, "key1"); } @Test public void removeNestedPropertyAndMove() { mk.commit("/", "+\"a\":{ \"key1\" : \"value1\"}", null, null); mk.commit("/", "^\"a/key1\" : null\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/c", null)); String nodes = mk.getNodes("/c", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyNotExists(obj, "key1"); } @Test public void modifyParentRemovePropertyAndMove() { mk.commit("/", "+\"a\":{ \"key1\" : \"value1\"}", null, null); mk.commit("/", "+\"b\" : {}\n" + "^\"a/key1\" : null\n" + ">\"a\":\"c\"", null, null); assertFalse(mk.nodeExists("/a", null)); assertTrue(mk.nodeExists("/b", null)); assertTrue(mk.nodeExists("/c", null)); String nodes = mk.getNodes("/c", null, 0 /*depth*/, 0 /*offset*/, -1 /*maxChildNodes*/, null /*filter*/); JSONObject obj = parseJSONObject(nodes); assertPropertyNotExists(obj, "key1"); } @Test public void moveAndMoveBack() { mk.commit("/", "+\"a\":{}", null, null); mk.commit("/", ">\"a\":\"x\">\"x\":\"a\"", null, null); assertNodesExist(null, "/a"); } @Test public void moveAndMoveBackWithChildren() { mk.commit("/", "+\"a\":{\"b\":{}}", null, null); mk.commit("/", ">\"a\":\"x\">\"x\":\"a\"", null, null); assertNodesExist(null, "/a", "/a/b"); } @Test public void moveAndMoveBackWithAddedChildren() { mk.commit("/", "+\"a\":{\"b\":{}}", null, null); mk.commit("/", ">\"a\":\"x\"+\"x/c\":{}>\"x\":\"a\"", null, null); assertNodesExist(null, "/a", "/a/b", "/a/c"); } @Test public void 
moveAndMoveBackWithSetProperties() { mk.commit("/", "+\"a\":{\"b\":{}}", null, null); mk.commit("/", ">\"a\":\"x\"^\"x/p\":1>\"x\":\"a\"", null, null); assertNodesExist(null, "/a", "/a/b"); assertPropExists(null, "/a", "p"); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.db;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeoutException;

import org.apache.commons.lang.ArrayUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.db.filter.QueryPath;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.Gossiper;
import org.apache.cassandra.net.Message;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.service.DigestMismatchException;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.service.WriteResponseHandler;
import org.apache.cassandra.service.IWriteResponseHandler;
import org.apache.cassandra.thrift.InvalidRequestException;
import org.apache.cassandra.utils.WrappedRunnable;
import org.cliffc.high_scale_lib.NonBlockingHashSet;

import static com.google.common.base.Charsets.UTF_8;

/**
 * For each endpoint for which we have hints, there is a row in the system hints CF.
 * SuperColumns in that row are keys for which we have hinted data.
 * Subcolumns names within that supercolumn are keyspace+CF, concatenated with SEPARATOR.
 * Subcolumn values are always empty; instead, we store the row data "normally"
 * in the application table it belongs in.
 *
 * When FailureDetector signals that a node that was down is back up, we read its
 * hints row to see what rows we need to forward data for, then reach each row in its
 * entirety and send it over.
 *
 * deliverHints is also exposed to JMX so it can be run manually if FD ever misses
 * its cue somehow.
 *
 * HHM never deletes the row from Application tables; there is no way to distinguish that
 * from hinted tombstones!  instead, rely on cleanup compactions to remove data
 * that doesn't belong on this node.  (Cleanup compactions may be started manually
 * -- on a per node basis -- with "nodeprobe cleanup.")
 *
 * TODO this avoids our hint rows from growing excessively large by offloading the
 * message data into application tables.  But, this means that cleanup compactions
 * will nuke HH data.  Probably better would be to store the RowMutation messages
 * in a HHData (non-super) CF, modifying the above to store a UUID value in the
 * HH subcolumn value, which we use as a key to a [standard] HHData system CF
 * that would contain the message bytes.
 */
public class HintedHandOffManager
{
    public static final HintedHandOffManager instance = new HintedHandOffManager();

    private static final Logger logger_ = LoggerFactory.getLogger(HintedHandOffManager.class);

    public static final String HINTS_CF = "HintsColumnFamily";

    // hints are read back a slice at a time to bound memory use; this is the slice size
    private static final int PAGE_SIZE = 10000;
    // separates the keyspace name from the column family name inside a hint subcolumn name
    private static final String SEPARATOR = "-";

    // endpoints that already have a delivery task queued; prevents duplicate submissions
    private final NonBlockingHashSet<InetAddress> queuedDeliveries = new NonBlockingHashSet<InetAddress>();

    private final ExecutorService executor_;

    public HintedHandOffManager()
    {
        // read the property once instead of twice (null-check and parse used separate lookups before)
        String priorityProperty = System.getProperty("cassandra.compaction.priority");
        int hhPriority = priorityProperty == null ? Thread.NORM_PRIORITY : Integer.parseInt(priorityProperty);
        executor_ = new JMXEnabledThreadPoolExecutor("HINTED-HANDOFF-POOL", hhPriority);
    }

    /**
     * Replays the locally stored row identified by (tableName, cfName, key) to the given endpoint,
     * paging through the row PAGE_SIZE columns at a time and waiting for each page to be acked.
     *
     * @return true if the hint can be considered handled -- either delivered, or discarded because
     *         the endpoint is no longer known to gossip; false if the endpoint is down or a page
     *         timed out, so delivery should be retried later.
     */
    private static boolean sendMessage(InetAddress endpoint, String tableName, String cfName, byte[] key) throws IOException
    {
        if (!Gossiper.instance.isKnownEndpoint(endpoint))
        {
            logger_.warn("Hints found for endpoint " + endpoint + " which is not part of the gossip network. discarding.");
            return true;
        }
        if (!FailureDetector.instance.isAlive(endpoint))
        {
            return false;
        }

        Table table = Table.open(tableName);
        DecoratedKey dkey = StorageService.getPartitioner().decorateKey(key);
        ColumnFamilyStore cfs = table.getColumnFamilyStore(cfName);
        byte[] startColumn = ArrayUtils.EMPTY_BYTE_ARRAY;
        while (true)
        {
            QueryFilter filter = QueryFilter.getSliceFilter(dkey, new QueryPath(cfs.getColumnFamilyName()), startColumn, ArrayUtils.EMPTY_BYTE_ARRAY, false, PAGE_SIZE);
            ColumnFamily cf = cfs.getColumnFamily(filter);
            if (pagingFinished(cf, startColumn))
                break;
            if (cf.getColumnNames().isEmpty())
            {
                logger_.debug("Nothing to hand off for {}", dkey);
                break;
            }

            // resume the next page after the last column we just read
            startColumn = cf.getColumnNames().last();
            RowMutation rm = new RowMutation(tableName, key);
            rm.add(cf);
            Message message = rm.makeRowMutationMessage();
            IWriteResponseHandler responseHandler = WriteResponseHandler.create(endpoint);
            MessagingService.instance.sendRR(message, new InetAddress[] { endpoint }, responseHandler);
            try
            {
                responseHandler.get();
            }
            catch (TimeoutException e)
            {
                return false;
            }
        }
        return true;
    }

    /** Tombstones a single (row key, table-CF) hint subcolumn in the endpoint's hints row. */
    private static void deleteHintKey(byte[] endpointAddress, byte[] key, byte[] tableCF, IClock clock) throws IOException
    {
        RowMutation rm = new RowMutation(Table.SYSTEM_TABLE, endpointAddress);
        rm.delete(new QueryPath(HINTS_CF, key, tableCF), clock);
        rm.apply();
    }

    /** Drops the entire hints row for an endpoint, then flushes and major-compacts the hints CF. */
    public static void deleteHintsForEndPoint(InetAddress endpoint)
    {
        // NOTE(review): this keys the row by endpoint.getAddress() (raw IP bytes), while
        // deliverHintsToEndpoint reads the row keyed by getHostAddress().getBytes(UTF_8)
        // (the textual address). If hints are stored under the textual key, this delete
        // may target a different row -- confirm against the code path that writes hints.
        ColumnFamilyStore hintStore = Table.open(Table.SYSTEM_TABLE).getColumnFamilyStore(HINTS_CF);
        RowMutation rm = new RowMutation(Table.SYSTEM_TABLE, endpoint.getAddress());
        rm.delete(new QueryPath(HINTS_CF), new TimestampClock(System.currentTimeMillis()));
        try
        {
            logger_.info("Deleting any stored hints for " + endpoint);
            rm.apply();
            hintStore.forceFlush();
            CompactionManager.instance.submitMajor(hintStore, 0, Integer.MAX_VALUE).get();
        }
        catch (Exception e)
        {
            // best-effort: stale hints are eventually cleaned up by compaction anyway
            logger_.warn("Could not delete hints for " + endpoint + ": " + e);
        }
    }

    private static boolean pagingFinished(ColumnFamily hintColumnFamily, byte[] startColumn)
    {
        // done if no hints found or the start column (same as last column processed in previous iteration) is the only one
        return hintColumnFamily == null
               || (hintColumnFamily.getSortedColumns().size() == 1 && hintColumnFamily.getColumn(startColumn) != null);
    }

    /** Encodes "tableName SEPARATOR columnFamily" as UTF-8 bytes for use as a hint subcolumn name. */
    public static byte[] makeCombinedName(String tableName, String columnFamily)
    {
        // encode the separator with UTF_8 too; the original used the platform default charset,
        // which disagrees with the UTF_8 used for the surrounding components on non-UTF-8 platforms
        byte[] withsep = ArrayUtils.addAll(tableName.getBytes(UTF_8), SEPARATOR.getBytes(UTF_8));
        return ArrayUtils.addAll(withsep, columnFamily.getBytes(UTF_8));
    }

    /** Splits a combined name produced by makeCombinedName back into { tableName, columnFamily }. */
    private static String[] getTableAndCFNames(byte[] joined)
    {
        int index = ArrayUtils.lastIndexOf(joined, SEPARATOR.getBytes(UTF_8)[0]);
        if (index < 1)
            // decode the bytes for the message; byte[].toString() would only print an identity hash
            throw new RuntimeException("Corrupted hint name " + new String(joined, UTF_8));
        String[] parts = new String[2];
        // decode with UTF_8 to mirror the encoding in makeCombinedName (was platform default charset)
        parts[0] = new String(ArrayUtils.subarray(joined, 0, index), UTF_8);
        parts[1] = new String(ArrayUtils.subarray(joined, index + 1, joined.length), UTF_8);
        return parts;
    }

    /**
     * Replays every hinted row for the given endpoint, deleting each hint subcolumn as its
     * row is acknowledged, then flushes and major-compacts the hints CF if anything was sent.
     * Aborts (leaving remaining hints in place) as soon as one row cannot be delivered.
     */
    private void deliverHintsToEndpoint(InetAddress endpoint) throws IOException, DigestMismatchException, InvalidRequestException, TimeoutException
    {
        logger_.info("Started hinted handoff for endpoint " + endpoint);
        // clear the queued flag up front so a node that flaps again gets re-queued
        queuedDeliveries.remove(endpoint);

        // 1. Get the key of the endpoint we need to handoff
        // 2. For each column read the list of rows: subcolumns are KS + SEPARATOR + CF
        // 3. Delete the subcolumn if the write was successful
        // 4. Force a flush
        // 5. Do major compaction to clean up all deletes etc.
        DecoratedKey epkey = StorageService.getPartitioner().decorateKey(endpoint.getHostAddress().getBytes(UTF_8));
        int rowsReplayed = 0;
        ColumnFamilyStore hintStore = Table.open(Table.SYSTEM_TABLE).getColumnFamilyStore(HINTS_CF);
        byte[] startColumn = ArrayUtils.EMPTY_BYTE_ARRAY;
        delivery:
        while (true)
        {
            QueryFilter filter = QueryFilter.getSliceFilter(epkey, new QueryPath(HINTS_CF), startColumn, ArrayUtils.EMPTY_BYTE_ARRAY, false, PAGE_SIZE);
            ColumnFamily hintColumnFamily = ColumnFamilyStore.removeDeleted(hintStore.getColumnFamily(filter), Integer.MAX_VALUE);
            if (pagingFinished(hintColumnFamily, startColumn))
                break;
            Collection<IColumn> keyColumns = hintColumnFamily.getSortedColumns();
            for (IColumn keyColumn : keyColumns)
            {
                startColumn = keyColumn.name();
                Collection<IColumn> tableCFs = keyColumn.getSubColumns();
                for (IColumn tableCF : tableCFs)
                {
                    String[] parts = getTableAndCFNames(tableCF.name());
                    if (sendMessage(endpoint, parts[0], parts[1], keyColumn.name()))
                    {
                        deleteHintKey(endpoint.getHostAddress().getBytes(UTF_8), keyColumn.name(), tableCF.name(), tableCF.clock());
                        rowsReplayed++;
                    }
                    else
                    {
                        logger_.info("Could not complete hinted handoff to " + endpoint);
                        break delivery;
                    }
                }
                // (removed a second, redundant "startColumn = keyColumn.name();" here -- it is
                // already assigned at the top of this loop iteration)
            }
        }

        if (rowsReplayed > 0)
        {
            hintStore.forceFlush();
            try
            {
                CompactionManager.instance.submitMajor(hintStore, 0, Integer.MAX_VALUE).get();
            }
            catch (Exception e)
            {
                throw new RuntimeException(e);
            }
        }

        logger_.info(String.format("Finished hinted handoff of %s rows to endpoint %s", rowsReplayed, endpoint));
    }

    /** called when a keyspace is dropped or rename. newTable==null in the case of a drop. */
    public static void renameHints(String oldTable, String newTable) throws IOException
    {
        DecoratedKey oldTableKey = StorageService.getPartitioner().decorateKey(oldTable.getBytes(UTF_8));
        // we're basically going to fetch, drop and add the scf for the old and new table. we need to do it piecemeal
        // though since there could be GB of data.
        ColumnFamilyStore hintStore = Table.open(Table.SYSTEM_TABLE).getColumnFamilyStore(HINTS_CF);
        byte[] startCol = ArrayUtils.EMPTY_BYTE_ARRAY;
        long now = System.currentTimeMillis();
        while (true)
        {
            QueryFilter filter = QueryFilter.getSliceFilter(oldTableKey, new QueryPath(HINTS_CF), startCol, ArrayUtils.EMPTY_BYTE_ARRAY, false, PAGE_SIZE);
            ColumnFamily cf = ColumnFamilyStore.removeDeleted(hintStore.getColumnFamily(filter), Integer.MAX_VALUE);
            if (pagingFinished(cf, startCol))
                break;
            if (newTable != null)
            {
                RowMutation insert = new RowMutation(Table.SYSTEM_TABLE, newTable.getBytes(UTF_8));
                insert.add(cf);
                insert.apply();
            }
            RowMutation drop = new RowMutation(Table.SYSTEM_TABLE, oldTableKey.key);
            for (byte[] key : cf.getColumnNames())
            {
                drop.delete(new QueryPath(HINTS_CF, key), new TimestampClock(now));
                startCol = key;
            }
            drop.apply();
        }
    }

    /*
     * This method is used to deliver hints to a particular endpoint.
     * When we learn that some endpoint is back up we deliver the data
     * to him via an event driven mechanism.
     */
    public void deliverHints(final InetAddress to)
    {
        // add() returns false when a delivery task is already queued for this endpoint
        if (!queuedDeliveries.add(to))
            return;

        Runnable r = new WrappedRunnable()
        {
            public void runMayThrow() throws Exception
            {
                deliverHintsToEndpoint(to);
            }
        };
        executor_.submit(r);
    }

    /** JMX-friendly overload taking a host name or textual IP address. */
    public void deliverHints(String to) throws UnknownHostException
    {
        deliverHints(InetAddress.getByName(to));
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/devtools/clouderrorreporting/v1beta1/common.proto

// NOTE(review): this file is generated by protoc.  Change the .proto definition and
// regenerate rather than editing by hand -- manual edits will be lost on regeneration.

package com.google.devtools.clouderrorreporting.v1beta1;

/**
 *
 *
 * <pre>
 * Information related to tracking the progress on resolving the error.
 * </pre>
 *
 * Protobuf type {@code google.devtools.clouderrorreporting.v1beta1.TrackingIssue}
 */
public final class TrackingIssue extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.devtools.clouderrorreporting.v1beta1.TrackingIssue)
    TrackingIssueOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use TrackingIssue.newBuilder() to construct.
  private TrackingIssue(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private TrackingIssue() {
    url_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor; invoked only through PARSER below.
  private TrackingIssue(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();

              url_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.clouderrorreporting.v1beta1.CommonProto
        .internal_static_google_devtools_clouderrorreporting_v1beta1_TrackingIssue_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.clouderrorreporting.v1beta1.CommonProto
        .internal_static_google_devtools_clouderrorreporting_v1beta1_TrackingIssue_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.class,
            com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.Builder.class);
  }

  public static final int URL_FIELD_NUMBER = 1;
  private volatile java.lang.Object url_;
  /**
   *
   *
   * <pre>
   * A URL pointing to a related entry in an issue tracking system.
   * Example: https://github.com/user/project/issues/4
   * </pre>
   *
   * <code>string url = 1;</code>
   */
  public java.lang.String getUrl() {
    java.lang.Object ref = url_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // caches the decoded String back into url_ (standard protobuf lazy decode)
      url_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A URL pointing to a related entry in an issue tracking system.
   * Example: https://github.com/user/project/issues/4
   * </pre>
   *
   * <code>string url = 1;</code>
   */
  public com.google.protobuf.ByteString getUrlBytes() {
    java.lang.Object ref = url_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      url_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getUrlBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, url_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getUrlBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, url_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue)) {
      return super.equals(obj);
    }
    com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue other =
        (com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue) obj;

    boolean result = true;
    result = result && getUrl().equals(other.getUrl());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + URL_FIELD_NUMBER;
    hash = (53 * hash) + getUrl().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Information related to tracking the progress on resolving the error.
   * </pre>
   *
   * Protobuf type {@code google.devtools.clouderrorreporting.v1beta1.TrackingIssue}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.devtools.clouderrorreporting.v1beta1.TrackingIssue)
      com.google.devtools.clouderrorreporting.v1beta1.TrackingIssueOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.clouderrorreporting.v1beta1.CommonProto
          .internal_static_google_devtools_clouderrorreporting_v1beta1_TrackingIssue_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.clouderrorreporting.v1beta1.CommonProto
          .internal_static_google_devtools_clouderrorreporting_v1beta1_TrackingIssue_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.class,
              com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.Builder.class);
    }

    // Construct using com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      url_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.clouderrorreporting.v1beta1.CommonProto
          .internal_static_google_devtools_clouderrorreporting_v1beta1_TrackingIssue_descriptor;
    }

    @java.lang.Override
    public com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue
        getDefaultInstanceForType() {
      return com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue build() {
      com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue buildPartial() {
      com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue result =
          new com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue(this);
      result.url_ = url_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue) {
        return mergeFrom((com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue other) {
      if (other == com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue.getDefaultInstance())
        return this;
      if (!other.getUrl().isEmpty()) {
        url_ = other.url_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object url_ = "";
    /**
     *
     *
     * <pre>
     * A URL pointing to a related entry in an issue tracking system.
     * Example: https://github.com/user/project/issues/4
     * </pre>
     *
     * <code>string url = 1;</code>
     */
    public java.lang.String getUrl() {
      java.lang.Object ref = url_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        url_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A URL pointing to a related entry in an issue tracking system.
     * Example: https://github.com/user/project/issues/4
     * </pre>
     *
     * <code>string url = 1;</code>
     */
    public com.google.protobuf.ByteString getUrlBytes() {
      java.lang.Object ref = url_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        url_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A URL pointing to a related entry in an issue tracking system.
     * Example: https://github.com/user/project/issues/4
     * </pre>
     *
     * <code>string url = 1;</code>
     */
    public Builder setUrl(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      url_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A URL pointing to a related entry in an issue tracking system.
     * Example: https://github.com/user/project/issues/4
     * </pre>
     *
     * <code>string url = 1;</code>
     */
    public Builder clearUrl() {

      url_ = getDefaultInstance().getUrl();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A URL pointing to a related entry in an issue tracking system.
     * Example: https://github.com/user/project/issues/4
     * </pre>
     *
     * <code>string url = 1;</code>
     */
    public Builder setUrlBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      url_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.devtools.clouderrorreporting.v1beta1.TrackingIssue)
  }

  // @@protoc_insertion_point(class_scope:google.devtools.clouderrorreporting.v1beta1.TrackingIssue)
  private static final com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue();
  }

  public static com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<TrackingIssue> PARSER =
      new com.google.protobuf.AbstractParser<TrackingIssue>() {
        @java.lang.Override
        public TrackingIssue parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new TrackingIssue(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<TrackingIssue> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TrackingIssue> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.clouderrorreporting.v1beta1.TrackingIssue
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package docking.widgets.fieldpanel.support;

import java.awt.Graphics;
import java.awt.Rectangle;

import javax.swing.JComponent;

import docking.widgets.fieldpanel.Layout;
import docking.widgets.fieldpanel.field.Field;
import docking.widgets.fieldpanel.internal.*;

/**
 * Handles layouts with multiple rows.
 * <p>
 * A MultiRowLayout stacks one or more {@link RowLayout}s vertically and presents them
 * as a single {@link Layout}. Field numbers are global across all rows; the
 * {@code offsets} array maps between global field numbers and per-row field numbers.
 */
public class MultiRowLayout implements Layout {
	private RowLayout[] layouts;
	// offsets[i] is the global field number of the first field in row i;
	// offsets[layouts.length] is the total field count (see buildOffsets()).
	private int[] offsets;
	private int numFields;
	private int heightAbove;
	private int heightBelow;
	private int primaryOffset = -1; // lazily computed by findPrimaryOffset()
	private int indexSize = 1;

	/**
	 * Constructs a new MultiRowLayout with a single layout row.
	 * @param layout the single layout to add to this MultiRowLayout.
	 * @param indexSize the number of indexes this layout spans.
	 */
	public MultiRowLayout(RowLayout layout, int indexSize) {
		this.indexSize = indexSize;
		numFields = layout.getNumFields();
		layouts = new RowLayout[1];
		layouts[0] = layout;
		heightAbove = layouts[0].getHeightAbove();
		heightBelow = layouts[0].getHeightBelow();
		buildOffsets();
	}

	/**
	 * Constructs a new MultiRowLayout from the given rows.
	 * @param layouts the rows, stacked top to bottom; must contain at least one row.
	 * @param indexSize the number of indexes this layout spans.
	 */
	public MultiRowLayout(RowLayout[] layouts, int indexSize) {
		this.indexSize = indexSize;
		this.layouts = layouts;
		int height = 0;
		for (int i = 0; i < layouts.length; i++) {
			numFields += layouts[i].getNumFields();
			height += layouts[i].getHeight();
		}
		// Only the first row contributes to the height above the baseline.
		heightAbove = layouts[0].getHeightAbove();
		heightBelow = height - heightAbove;
		buildOffsets();
	}

	// Builds the cumulative field-count table used to translate between global
	// and per-row field numbers.
	private void buildOffsets() {
		offsets = new int[layouts.length + 1];
		int soFar = 0;
		for (int i = 0; i < layouts.length; i++) {
			offsets[i] = soFar;
			soFar += layouts[i].getNumFields();
		}
		offsets[layouts.length] = soFar;
	}

	@Override
	public int getHeight() {
		return heightAbove + heightBelow;
	}

	@Override
	public int getCompressableWidth() {
		//
		// Since this is a multi-row layout, we have to make sure that our compressible width
		// is the largest of all rows so that the longest row doesn't get clipped.
		//
		int max = 0;
		for (Layout layout : layouts) {
			max = Math.max(max, layout.getCompressableWidth());
		}
		return max;
	}

	@Override
	public int getNumFields() {
		return numFields;
	}

	@Override
	public Field getField(int index) {
		for (int i = 0; i < layouts.length; i++) {
			if (index < offsets[i + 1]) {
				return layouts[i].getField(index - offsets[i]);
			}
		}
		return null;
	}

	@Override
	public void paint(JComponent c, Graphics g, PaintContext context, Rectangle rect,
			LayoutBackgroundColorManager colorManager, FieldLocation cursorLocation) {
		int totalShift = 0;
		int offset = 0;
		LayoutBackgroundColorManagerAdapter shiftedColorManager =
			new LayoutBackgroundColorManagerAdapter(colorManager);
		for (int i = 0; i < layouts.length; i++) {
			g.translate(0, offset);
			totalShift += offset;
			rect.y -= offset;
			shiftedColorManager.setRange(offsets[i], offsets[i + 1], i == layouts.length - 1);
			FieldLocation shiftedCursorLocation = null;
			if (cursorLocation != null) {
				int shiftedFieldNum = cursorLocation.fieldNum - offsets[i];
				// The cursor belongs to row i only if its row-relative field number is
				// within this row's own field count (offsets[i+1] - offsets[i]).
				// Comparing against the absolute offsets[i+1] would wrongly match
				// field numbers that belong to later rows.
				if (shiftedFieldNum >= 0 && shiftedFieldNum < offsets[i + 1] - offsets[i]) {
					shiftedCursorLocation = new FieldLocation(cursorLocation);
					shiftedCursorLocation.fieldNum = shiftedFieldNum;
				}
			}
			layouts[i].paint(c, g, context, rect, shiftedColorManager, shiftedCursorLocation);
			offset = layouts[i].getHeight();
		}
		// Undo the cumulative translation so the Graphics and clip rect are unchanged.
		g.translate(0, -totalShift);
		rect.y += totalShift;
	}

	@Override
	public int setCursor(FieldLocation cursorLoc, int x, int y) {
		int offset = 0;
		for (int i = 0; i < layouts.length; i++) {
			if (layouts[i].contains(y - offset)) {
				int lastX = layouts[i].setCursor(cursorLoc, x, y - offset);
				cursorLoc.fieldNum += offsets[i]; // translate back to a global field number
				return lastX;
			}
			offset += layouts[i].getHeight();
		}
		// y is outside all rows; fall back to the first row.
		return layouts[0].setCursor(cursorLoc, x, y);
	}

	@Override
	public Rectangle getCursorRect(int fieldNum, int row, int col) {
		int offset = 0;
		for (int i = 0; i < layouts.length; i++) {
			if (fieldNum < offsets[i + 1]) {
				Rectangle rect = layouts[i].getCursorRect(fieldNum - offsets[i], row, col);
				if (rect != null) {
					rect.y += offset; // shift from row-local to layout coordinates
				}
				return rect;
			}
			offset += layouts[i].getHeight();
		}
		return null;
	}

	@Override
	public boolean cursorUp(FieldLocation cursorLoc, int lastX) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		boolean result = layouts[row].cursorUp(cursorLoc, lastX);
		cursorLoc.fieldNum += offsets[row];
		if (!result) {
			// Current row can't move up; enter the previous row from its bottom.
			if (row == 0) {
				return false;
			}
			cursorLoc.fieldNum -= offsets[--row];
			result = layouts[row].enterLayout(cursorLoc, lastX, false);
			cursorLoc.fieldNum += offsets[row];
		}
		return result;
	}

	@Override
	public boolean cursorDown(FieldLocation cursorLoc, int lastX) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		boolean result = layouts[row].cursorDown(cursorLoc, lastX);
		cursorLoc.fieldNum += offsets[row];
		if (!result) {
			// Current row can't move down; enter the next row from its top.
			if (row >= layouts.length - 1) {
				return false;
			}
			cursorLoc.fieldNum -= offsets[++row];
			result = layouts[row].enterLayout(cursorLoc, lastX, true);
			cursorLoc.fieldNum += offsets[row];
		}
		return result;
	}

	@Override
	public int cursorBeginning(FieldLocation cursorLoc) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		int lastX = layouts[row].cursorBeginning(cursorLoc);
		cursorLoc.fieldNum += offsets[row];
		return lastX;
	}

	@Override
	public int cursorEnd(FieldLocation cursorLoc) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		int lastX = layouts[row].cursorEnd(cursorLoc);
		cursorLoc.fieldNum += offsets[row];
		return lastX;
	}

	@Override
	public int cursorLeft(FieldLocation cursorLoc) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		int returnVal = layouts[row].cursorLeft(cursorLoc);
		cursorLoc.fieldNum += offsets[row];
		if (returnVal < 0) {
			// Fell off the left edge of this row; wrap to the end of the previous row.
			if (row == 0) {
				return -1;
			}
			cursorLoc.fieldNum -= offsets[--row];
			returnVal = layouts[row].cursorEnd(cursorLoc);
			cursorLoc.fieldNum += offsets[row];
		}
		return returnVal;
	}

	@Override
	public int cursorRight(FieldLocation cursorLoc) {
		int row = findRow(cursorLoc);
		cursorLoc.fieldNum -= offsets[row];
		int returnVal = layouts[row].cursorRight(cursorLoc);
		cursorLoc.fieldNum += offsets[row];
		if (returnVal < 0) {
			// Fell off the right edge of this row; wrap to the beginning of the next row.
			if (row >= layouts.length - 1) {
				return -1;
			}
			cursorLoc.fieldNum -= offsets[++row];
			returnVal = layouts[row].cursorBeginning(cursorLoc);
			cursorLoc.fieldNum += offsets[row];
		}
		return returnVal;
	}

	@Override
	public boolean enterLayout(FieldLocation cursorLoc, int lastX, boolean fromTop) {
		if (fromTop) {
			return layouts[0].enterLayout(cursorLoc, lastX, fromTop);
		}
		cursorLoc.fieldNum -= offsets[layouts.length - 1];
		boolean result = layouts[layouts.length - 1].enterLayout(cursorLoc, lastX, fromTop);
		cursorLoc.fieldNum += offsets[layouts.length - 1];
		return result;
	}

	@Override
	public int getScrollableUnitIncrement(int topOfScreen, int direction) {
		int searchPoint = topOfScreen;
		if (direction < 0) {
			// When scrolling up, the row of interest is the one just above topOfScreen.
			searchPoint--;
		}
		int offset = 0;
		for (int i = 0; i < layouts.length - 1; i++) {
			if (layouts[i].contains(searchPoint - offset)) {
				return layouts[i].getScrollableUnitIncrement(topOfScreen - offset, direction);
			}
			offset += layouts[i].getHeight();
		}
		return layouts[layouts.length - 1].getScrollableUnitIncrement(topOfScreen - offset,
			direction);
	}

	@Override
	public boolean contains(int yPos) {
		return yPos >= 0 && yPos < heightAbove + heightBelow;
	}

	/**
	 * Returns the row containing the given FieldLocation.
	 * Defaults to row 0 if the location's field number is out of range.
	 */
	private int findRow(FieldLocation loc) {
		for (int i = 0; i < layouts.length; i++) {
			if (loc.fieldNum < offsets[i + 1]) {
				return i;
			}
		}
		return 0;
	}

	/**
	 * Adjusts this layout and the layout passed as a parameter by inserting
	 * space above/below rows as necessary to make their vertical layouts match.
	 * Rows are matched up by their row IDs.
	 *
	 * NOTE(review): heightAbove/heightBelow are not recomputed here even though the
	 * row heights may change; presumably callers re-derive them — confirm.
	 *
	 * @param layout the other multi-row layout that is to be synchronized with.
	 * @param dummyField empty field used for spacing (currently unused; kept for
	 *        interface compatibility).
	 */
	public void align(MultiRowLayout layout, Field dummyField) {
		int myNumRows = layouts.length;
		int otherNumRows = layout.layouts.length;
		int myRow = 0;
		int otherRow = 0;
		int myHeight = layouts[myRow].getHeight();
		int otherHeight = layout.layouts[otherRow].getHeight();
		primaryOffset = -1; // invalidate the cached primary offset
		while (myRow < myNumRows && otherRow < otherNumRows) {
			int myId = layouts[myRow].getRowID();
			int otherId = layout.layouts[otherRow].getRowID();
			if (myId < otherId) {
				// This layout has a row the other doesn't; pad the other layout.
				layout.layouts[otherRow].insertSpaceAbove(myHeight);
				if (++myRow < myNumRows) {
					myHeight = layouts[myRow].getHeight();
				}
			}
			else if (myId > otherId) {
				// The other layout has a row this one doesn't; pad this layout.
				layouts[myRow].insertSpaceAbove(otherHeight);
				if (++otherRow < otherNumRows) {
					otherHeight = layout.layouts[otherRow].getHeight();
				}
			}
			else {
				// Matching rows: pad the shorter row below to equalize heights.
				int myEnd = layouts[myRow].getHeight();
				int otherEnd = layout.layouts[otherRow].getHeight();
				if (myEnd > otherEnd) {
					layout.layouts[otherRow].insertSpaceBelow(myEnd - otherEnd);
				}
				else if (otherEnd > myEnd) {
					layouts[myRow].insertSpaceBelow(otherEnd - myEnd);
				}
				if (++myRow < myNumRows) {
					myHeight = layouts[myRow].getHeight();
				}
				if (++otherRow < otherNumRows) {
					otherHeight = layout.layouts[otherRow].getHeight();
				}
			}
		}
		// One side may have trailing rows; pad the other side's last row to match.
		while (myRow < myNumRows) {
			layout.layouts[otherNumRows - 1].insertSpaceBelow(myHeight);
			if (++myRow < myNumRows) {
				myHeight = layouts[myRow].getHeight();
			}
		}
		while (otherRow < otherNumRows) {
			layouts[myNumRows - 1].insertSpaceBelow(otherHeight);
			if (++otherRow < otherNumRows) {
				otherHeight = layout.layouts[otherRow].getHeight();
			}
		}
	}

	@Override
	public int getPrimaryOffset() {
		if (primaryOffset == -1) {
			findPrimaryOffset();
		}
		return primaryOffset;
	}

	// Computes the y offset of the first primary row; 0 if no row is primary.
	private void findPrimaryOffset() {
		primaryOffset = 0;
		for (int i = 0; i < layouts.length; i++) {
			if (layouts[i].isPrimary()) {
				return;
			}
			primaryOffset += layouts[i].getHeight();
		}
		primaryOffset = 0;
	}

	@Override
	public Rectangle getFieldBounds(int index) {
		int offset = 0;
		for (int i = 0; i < layouts.length; i++) {
			if (index < offsets[i + 1]) {
				Rectangle rect = layouts[i].getFieldBounds(index - offsets[i]);
				rect.y += offset; // shift from row-local to layout coordinates
				return rect;
			}
			offset += layouts[i].getHeight();
		}
		return null;
	}

	@Override
	public void insertSpaceAbove(int size) {
		layouts[0].insertSpaceAbove(size);
		heightAbove += size;
	}

	@Override
	public void insertSpaceBelow(int size) {
		layouts[layouts.length - 1].insertSpaceBelow(size);
		heightBelow += size;
	}

	/**
	 * Fills in the given array with the heights of all the layouts in the MultiRowLayout.
	 * Consecutive rows with the same row ID are treated as one logical row and their
	 * heights are summed.
	 * @param rowHeights the array to be filled in with heights. Each height is stored at
	 * its layoutRow id as the index into the array; existing entries are only ever grown.
	 */
	public void fillHeights(int[] rowHeights) {
		int lastId = -1;
		int height = 0;
		for (int i = 0; i < layouts.length; i++) {
			int id = layouts[i].getRowID();
			if (id == lastId) {
				height += layouts[i].getHeight();
			}
			else {
				if (lastId >= 0) {
					rowHeights[lastId] = Math.max(rowHeights[lastId], height);
				}
				lastId = id;
				height = layouts[i].getHeight();
			}
		}
		if (lastId >= 0) {
			rowHeights[lastId] = Math.max(rowHeights[lastId], height);
		}
	}

	/**
	 * Aligns the heights in this MultiRowLayout to match those in the given row heights array.
	 * Extra space is inserted to align the rows in this layout to match those specified in
	 * the given array.
	 * @param rowHeights the array of row heights to align to.
	 */
	public void align(int[] rowHeights) {
		int row = 0;
		int totalAbove = 0;
		int lastId = -1;
		for (int i = 0; i < layouts.length; i++) {
			int id = layouts[i].getRowID();
			if (id != lastId) {
				// Accumulate the heights of any rows missing from this layout.
				for (; row < id; row++) {
					totalAbove += rowHeights[row];
				}
				int origHeight = layouts[i].getHeight();
				layouts[i].insertSpaceAbove(totalAbove);
				// Carry forward any height the target row has beyond this row's own height.
				totalAbove = rowHeights[id] - origHeight;
				lastId = id;
				row++;
			}
			else {
				// Same logical row continues; its height consumes part of the carry.
				totalAbove -= layouts[i].getHeight();
			}
		}
		// Pad below with the remaining carry plus all trailing target rows.
		int totalBelow = totalAbove;
		for (; row < rowHeights.length; row++) {
			totalBelow += rowHeights[row];
		}
		insertSpaceBelow(totalBelow);
		// Recompute the overall above/below split from the adjusted rows.
		int height = 0;
		for (int i = 0; i < layouts.length; i++) {
			height += layouts[i].getHeight();
		}
		heightAbove = layouts[0].getHeightAbove();
		heightBelow = height - heightAbove;
	}

	@Override
	public int getIndexSize() {
		return indexSize;
	}

	@Override
	public int getBeginRowFieldNum(int fieldIndex) {
		for (int i = 0; i < layouts.length; i++) {
			if (fieldIndex < offsets[i + 1]) {
				return offsets[i];
			}
		}
		return offsets[layouts.length - 1];
	}

	@Override
	public int getEndRowFieldNum(int fieldIndex) {
		for (int i = 0; i < layouts.length; i++) {
			if (fieldIndex < offsets[i + 1]) {
				return offsets[i + 1];
			}
		}
		return offsets[layouts.length];
	}

	/**
	 * Returns the row ID of the first row in this layout.
	 */
	public int getFirstRowID() {
		return layouts[0].getRowID();
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.applicationhistoryservice; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceStability.Unstable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.file.tfile.TFile; import org.apache.hadoop.service.AbstractService; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import 
org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto; import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl; 
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBImpl; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerFinishDataPBImpl; import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl; import org.apache.hadoop.yarn.util.ConverterUtils; import com.google.protobuf.InvalidProtocolBufferException; /** * File system implementation of {@link ApplicationHistoryStore}. In this * implementation, one application will have just one file in the file system, * which contains all the history data of one application, and its attempts and * containers. {@link #applicationStarted(ApplicationStartData)} is supposed to * be invoked first when writing any history data of one application and it will * open a file, while {@link #applicationFinished(ApplicationFinishData)} is * supposed to be last writing operation and will close the file. */ @Public @Unstable public class FileSystemApplicationHistoryStore extends AbstractService implements ApplicationHistoryStore { private static final Log LOG = LogFactory .getLog(FileSystemApplicationHistoryStore.class); private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot"; private static final int MIN_BLOCK_SIZE = 256 * 1024; private static final String START_DATA_SUFFIX = "_start"; private static final String FINISH_DATA_SUFFIX = "_finish"; private static final FsPermission ROOT_DIR_UMASK = FsPermission .createImmutable((short) 0740); private static final FsPermission HISTORY_FILE_UMASK = FsPermission .createImmutable((short) 0640); private FileSystem fs; private Path rootDirPath; private ConcurrentMap<ApplicationId, HistoryFileWriter> outstandingWriters = new ConcurrentHashMap<ApplicationId, HistoryFileWriter>(); public FileSystemApplicationHistoryStore() { super(FileSystemApplicationHistoryStore.class.getName()); } protected FileSystem getFileSystem(Path path, Configuration conf) throws Exception { 
return path.getFileSystem(conf); } @Override public void serviceStart() throws Exception { Configuration conf = getConfig(); Path fsWorkingPath = new Path(conf.get(YarnConfiguration.FS_APPLICATION_HISTORY_STORE_URI, conf.get("hadoop.tmp.dir") + "/yarn/timeline/generic-history")); rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME); try { fs = getFileSystem(fsWorkingPath, conf); fs.mkdirs(rootDirPath, ROOT_DIR_UMASK); } catch (IOException e) { LOG.error("Error when initializing FileSystemHistoryStorage", e); throw e; } super.serviceStart(); } @Override public void serviceStop() throws Exception { try { for (Entry<ApplicationId, HistoryFileWriter> entry : outstandingWriters .entrySet()) { entry.getValue().close(); } outstandingWriters.clear(); } finally { IOUtils.cleanup(LOG, fs); } super.serviceStop(); } @Override public ApplicationHistoryData getApplication(ApplicationId appId) throws IOException { HistoryFileReader hfReader = getHistoryFileReader(appId); try { boolean readStartData = false; boolean readFinishData = false; ApplicationHistoryData historyData = ApplicationHistoryData.newInstance(appId, null, null, null, null, Long.MIN_VALUE, Long.MIN_VALUE, Long.MAX_VALUE, null, FinalApplicationStatus.UNDEFINED, null); while ((!readStartData || !readFinishData) && hfReader.hasNext()) { HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.equals(appId.toString())) { if (entry.key.suffix.equals(START_DATA_SUFFIX)) { ApplicationStartData startData = parseApplicationStartData(entry.value); mergeApplicationHistoryData(historyData, startData); readStartData = true; } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { ApplicationFinishData finishData = parseApplicationFinishData(entry.value); mergeApplicationHistoryData(historyData, finishData); readFinishData = true; } } } if (!readStartData && !readFinishData) { return null; } if (!readStartData) { LOG.warn("Start information is missing for application " + appId); } if (!readFinishData) { 
LOG.warn("Finish information is missing for application " + appId); } LOG.info("Completed reading history information of application " + appId); return historyData; } catch (IOException e) { LOG.error("Error when reading history file of application " + appId, e); throw e; } finally { hfReader.close(); } } @Override public Map<ApplicationId, ApplicationHistoryData> getAllApplications() throws IOException { Map<ApplicationId, ApplicationHistoryData> historyDataMap = new HashMap<ApplicationId, ApplicationHistoryData>(); FileStatus[] files = fs.listStatus(rootDirPath); for (FileStatus file : files) { ApplicationId appId = ApplicationId.fromString(file.getPath().getName()); try { ApplicationHistoryData historyData = getApplication(appId); if (historyData != null) { historyDataMap.put(appId, historyData); } } catch (IOException e) { // Eat the exception not to disturb the getting the next // ApplicationHistoryData LOG.error("History information of application " + appId + " is not included into the result due to the exception", e); } } return historyDataMap; } @Override public Map<ApplicationAttemptId, ApplicationAttemptHistoryData> getApplicationAttempts(ApplicationId appId) throws IOException { Map<ApplicationAttemptId, ApplicationAttemptHistoryData> historyDataMap = new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>(); HistoryFileReader hfReader = getHistoryFileReader(appId); try { while (hfReader.hasNext()) { HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith( ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) { ApplicationAttemptId appAttemptId = ApplicationAttemptId.fromString( entry.key.id); if (appAttemptId.getApplicationId().equals(appId)) { ApplicationAttemptHistoryData historyData = historyDataMap.get(appAttemptId); if (historyData == null) { historyData = ApplicationAttemptHistoryData.newInstance( appAttemptId, null, -1, null, null, null, FinalApplicationStatus.UNDEFINED, null); historyDataMap.put(appAttemptId, historyData); } 
if (entry.key.suffix.equals(START_DATA_SUFFIX)) { mergeApplicationAttemptHistoryData(historyData, parseApplicationAttemptStartData(entry.value)); } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { mergeApplicationAttemptHistoryData(historyData, parseApplicationAttemptFinishData(entry.value)); } } } } LOG.info("Completed reading history information of all application" + " attempts of application " + appId); } catch (IOException e) { LOG.info("Error when reading history information of some application" + " attempts of application " + appId); } finally { hfReader.close(); } return historyDataMap; } @Override public ApplicationAttemptHistoryData getApplicationAttempt( ApplicationAttemptId appAttemptId) throws IOException { HistoryFileReader hfReader = getHistoryFileReader(appAttemptId.getApplicationId()); try { boolean readStartData = false; boolean readFinishData = false; ApplicationAttemptHistoryData historyData = ApplicationAttemptHistoryData.newInstance(appAttemptId, null, -1, null, null, null, FinalApplicationStatus.UNDEFINED, null); while ((!readStartData || !readFinishData) && hfReader.hasNext()) { HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.equals(appAttemptId.toString())) { if (entry.key.suffix.equals(START_DATA_SUFFIX)) { ApplicationAttemptStartData startData = parseApplicationAttemptStartData(entry.value); mergeApplicationAttemptHistoryData(historyData, startData); readStartData = true; } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { ApplicationAttemptFinishData finishData = parseApplicationAttemptFinishData(entry.value); mergeApplicationAttemptHistoryData(historyData, finishData); readFinishData = true; } } } if (!readStartData && !readFinishData) { return null; } if (!readStartData) { LOG.warn("Start information is missing for application attempt " + appAttemptId); } if (!readFinishData) { LOG.warn("Finish information is missing for application attempt " + appAttemptId); } LOG.info("Completed reading history 
information of application attempt " + appAttemptId); return historyData; } catch (IOException e) { LOG.error("Error when reading history file of application attempt" + appAttemptId, e); throw e; } finally { hfReader.close(); } } @Override public ContainerHistoryData getContainer(ContainerId containerId) throws IOException { HistoryFileReader hfReader = getHistoryFileReader(containerId.getApplicationAttemptId() .getApplicationId()); try { boolean readStartData = false; boolean readFinishData = false; ContainerHistoryData historyData = ContainerHistoryData .newInstance(containerId, null, null, null, Long.MIN_VALUE, Long.MAX_VALUE, null, Integer.MAX_VALUE, null); while ((!readStartData || !readFinishData) && hfReader.hasNext()) { HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.equals(containerId.toString())) { if (entry.key.suffix.equals(START_DATA_SUFFIX)) { ContainerStartData startData = parseContainerStartData(entry.value); mergeContainerHistoryData(historyData, startData); readStartData = true; } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { ContainerFinishData finishData = parseContainerFinishData(entry.value); mergeContainerHistoryData(historyData, finishData); readFinishData = true; } } } if (!readStartData && !readFinishData) { return null; } if (!readStartData) { LOG.warn("Start information is missing for container " + containerId); } if (!readFinishData) { LOG.warn("Finish information is missing for container " + containerId); } LOG.info("Completed reading history information of container " + containerId); return historyData; } catch (IOException e) { LOG.error("Error when reading history file of container " + containerId, e); throw e; } finally { hfReader.close(); } } @Override public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) throws IOException { ApplicationAttemptHistoryData attemptHistoryData = getApplicationAttempt(appAttemptId); if (attemptHistoryData == null || 
attemptHistoryData.getMasterContainerId() == null) { return null; } return getContainer(attemptHistoryData.getMasterContainerId()); } @Override public Map<ContainerId, ContainerHistoryData> getContainers( ApplicationAttemptId appAttemptId) throws IOException { Map<ContainerId, ContainerHistoryData> historyDataMap = new HashMap<ContainerId, ContainerHistoryData>(); HistoryFileReader hfReader = getHistoryFileReader(appAttemptId.getApplicationId()); try { while (hfReader.hasNext()) { HistoryFileReader.Entry entry = hfReader.next(); if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) { ContainerId containerId = ContainerId.fromString(entry.key.id); if (containerId.getApplicationAttemptId().equals(appAttemptId)) { ContainerHistoryData historyData = historyDataMap.get(containerId); if (historyData == null) { historyData = ContainerHistoryData.newInstance( containerId, null, null, null, Long.MIN_VALUE, Long.MAX_VALUE, null, Integer.MAX_VALUE, null); historyDataMap.put(containerId, historyData); } if (entry.key.suffix.equals(START_DATA_SUFFIX)) { mergeContainerHistoryData(historyData, parseContainerStartData(entry.value)); } else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) { mergeContainerHistoryData(historyData, parseContainerFinishData(entry.value)); } } } } LOG.info("Completed reading history information of all containers" + " of application attempt " + appAttemptId); } catch (IOException e) { LOG.info("Error when reading history information of some containers" + " of application attempt " + appAttemptId); } finally { hfReader.close(); } return historyDataMap; } @Override public void applicationStarted(ApplicationStartData appStart) throws IOException { HistoryFileWriter hfWriter = outstandingWriters.get(appStart.getApplicationId()); if (hfWriter == null) { Path applicationHistoryFile = new Path(rootDirPath, appStart.getApplicationId().toString()); try { hfWriter = new HistoryFileWriter(applicationHistoryFile); LOG.info("Opened history file of application 
" + appStart.getApplicationId()); } catch (IOException e) { LOG.error("Error when openning history file of application " + appStart.getApplicationId(), e); throw e; } outstandingWriters.put(appStart.getApplicationId(), hfWriter); } else { throw new IOException("History file of application " + appStart.getApplicationId() + " is already opened"); } assert appStart instanceof ApplicationStartDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(appStart.getApplicationId() .toString(), START_DATA_SUFFIX), ((ApplicationStartDataPBImpl) appStart).getProto().toByteArray()); LOG.info("Start information of application " + appStart.getApplicationId() + " is written"); } catch (IOException e) { LOG.error("Error when writing start information of application " + appStart.getApplicationId(), e); throw e; } } @Override public void applicationFinished(ApplicationFinishData appFinish) throws IOException { HistoryFileWriter hfWriter = getHistoryFileWriter(appFinish.getApplicationId()); assert appFinish instanceof ApplicationFinishDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(appFinish.getApplicationId() .toString(), FINISH_DATA_SUFFIX), ((ApplicationFinishDataPBImpl) appFinish).getProto().toByteArray()); LOG.info("Finish information of application " + appFinish.getApplicationId() + " is written"); } catch (IOException e) { LOG.error("Error when writing finish information of application " + appFinish.getApplicationId(), e); throw e; } finally { hfWriter.close(); outstandingWriters.remove(appFinish.getApplicationId()); } } @Override public void applicationAttemptStarted( ApplicationAttemptStartData appAttemptStart) throws IOException { HistoryFileWriter hfWriter = getHistoryFileWriter(appAttemptStart.getApplicationAttemptId() .getApplicationId()); assert appAttemptStart instanceof ApplicationAttemptStartDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(appAttemptStart .getApplicationAttemptId().toString(), START_DATA_SUFFIX), 
((ApplicationAttemptStartDataPBImpl) appAttemptStart).getProto() .toByteArray()); LOG.info("Start information of application attempt " + appAttemptStart.getApplicationAttemptId() + " is written"); } catch (IOException e) { LOG.error("Error when writing start information of application attempt " + appAttemptStart.getApplicationAttemptId(), e); throw e; } } @Override public void applicationAttemptFinished( ApplicationAttemptFinishData appAttemptFinish) throws IOException { HistoryFileWriter hfWriter = getHistoryFileWriter(appAttemptFinish.getApplicationAttemptId() .getApplicationId()); assert appAttemptFinish instanceof ApplicationAttemptFinishDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(appAttemptFinish .getApplicationAttemptId().toString(), FINISH_DATA_SUFFIX), ((ApplicationAttemptFinishDataPBImpl) appAttemptFinish).getProto() .toByteArray()); LOG.info("Finish information of application attempt " + appAttemptFinish.getApplicationAttemptId() + " is written"); } catch (IOException e) { LOG.error("Error when writing finish information of application attempt " + appAttemptFinish.getApplicationAttemptId(), e); throw e; } } @Override public void containerStarted(ContainerStartData containerStart) throws IOException { HistoryFileWriter hfWriter = getHistoryFileWriter(containerStart.getContainerId() .getApplicationAttemptId().getApplicationId()); assert containerStart instanceof ContainerStartDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(containerStart .getContainerId().toString(), START_DATA_SUFFIX), ((ContainerStartDataPBImpl) containerStart).getProto().toByteArray()); LOG.info("Start information of container " + containerStart.getContainerId() + " is written"); } catch (IOException e) { LOG.error("Error when writing start information of container " + containerStart.getContainerId(), e); throw e; } } @Override public void containerFinished(ContainerFinishData containerFinish) throws IOException { HistoryFileWriter hfWriter = 
getHistoryFileWriter(containerFinish.getContainerId() .getApplicationAttemptId().getApplicationId()); assert containerFinish instanceof ContainerFinishDataPBImpl; try { hfWriter.writeHistoryData(new HistoryDataKey(containerFinish .getContainerId().toString(), FINISH_DATA_SUFFIX), ((ContainerFinishDataPBImpl) containerFinish).getProto().toByteArray()); LOG.info("Finish information of container " + containerFinish.getContainerId() + " is written"); } catch (IOException e) { LOG.error("Error when writing finish information of container " + containerFinish.getContainerId(), e); } } private static ApplicationStartData parseApplicationStartData(byte[] value) throws InvalidProtocolBufferException { return new ApplicationStartDataPBImpl( ApplicationStartDataProto.parseFrom(value)); } private static ApplicationFinishData parseApplicationFinishData(byte[] value) throws InvalidProtocolBufferException { return new ApplicationFinishDataPBImpl( ApplicationFinishDataProto.parseFrom(value)); } private static ApplicationAttemptStartData parseApplicationAttemptStartData( byte[] value) throws InvalidProtocolBufferException { return new ApplicationAttemptStartDataPBImpl( ApplicationAttemptStartDataProto.parseFrom(value)); } private static ApplicationAttemptFinishData parseApplicationAttemptFinishData(byte[] value) throws InvalidProtocolBufferException { return new ApplicationAttemptFinishDataPBImpl( ApplicationAttemptFinishDataProto.parseFrom(value)); } private static ContainerStartData parseContainerStartData(byte[] value) throws InvalidProtocolBufferException { return new ContainerStartDataPBImpl( ContainerStartDataProto.parseFrom(value)); } private static ContainerFinishData parseContainerFinishData(byte[] value) throws InvalidProtocolBufferException { return new ContainerFinishDataPBImpl( ContainerFinishDataProto.parseFrom(value)); } private static void mergeApplicationHistoryData( ApplicationHistoryData historyData, ApplicationStartData startData) { 
historyData.setApplicationName(startData.getApplicationName()); historyData.setApplicationType(startData.getApplicationType()); historyData.setQueue(startData.getQueue()); historyData.setUser(startData.getUser()); historyData.setSubmitTime(startData.getSubmitTime()); historyData.setStartTime(startData.getStartTime()); } private static void mergeApplicationHistoryData( ApplicationHistoryData historyData, ApplicationFinishData finishData) { historyData.setFinishTime(finishData.getFinishTime()); historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); historyData.setFinalApplicationStatus(finishData .getFinalApplicationStatus()); historyData.setYarnApplicationState(finishData.getYarnApplicationState()); } private static void mergeApplicationAttemptHistoryData( ApplicationAttemptHistoryData historyData, ApplicationAttemptStartData startData) { historyData.setHost(startData.getHost()); historyData.setRPCPort(startData.getRPCPort()); historyData.setMasterContainerId(startData.getMasterContainerId()); } private static void mergeApplicationAttemptHistoryData( ApplicationAttemptHistoryData historyData, ApplicationAttemptFinishData finishData) { historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); historyData.setTrackingURL(finishData.getTrackingURL()); historyData.setFinalApplicationStatus(finishData .getFinalApplicationStatus()); historyData.setYarnApplicationAttemptState(finishData .getYarnApplicationAttemptState()); } private static void mergeContainerHistoryData( ContainerHistoryData historyData, ContainerStartData startData) { historyData.setAllocatedResource(startData.getAllocatedResource()); historyData.setAssignedNode(startData.getAssignedNode()); historyData.setPriority(startData.getPriority()); historyData.setStartTime(startData.getStartTime()); } private static void mergeContainerHistoryData( ContainerHistoryData historyData, ContainerFinishData finishData) { historyData.setFinishTime(finishData.getFinishTime()); 
historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo()); historyData.setContainerExitStatus(finishData.getContainerExitStatus()); historyData.setContainerState(finishData.getContainerState()); } private HistoryFileWriter getHistoryFileWriter(ApplicationId appId) throws IOException { HistoryFileWriter hfWriter = outstandingWriters.get(appId); if (hfWriter == null) { throw new IOException("History file of application " + appId + " is not opened"); } return hfWriter; } private HistoryFileReader getHistoryFileReader(ApplicationId appId) throws IOException { Path applicationHistoryFile = new Path(rootDirPath, appId.toString()); try { fs.getFileStatus(applicationHistoryFile); } catch (FileNotFoundException e) { throw (FileNotFoundException) new FileNotFoundException("History file for" + " application " + appId + " is not found: " + e).initCause(e); } // The history file is still under writing if (outstandingWriters.containsKey(appId)) { throw new IOException("History file for application " + appId + " is under writing"); } return new HistoryFileReader(applicationHistoryFile); } private class HistoryFileReader { private class Entry { private HistoryDataKey key; private byte[] value; public Entry(HistoryDataKey key, byte[] value) { this.key = key; this.value = value; } } private TFile.Reader reader; private TFile.Reader.Scanner scanner; FSDataInputStream fsdis; public HistoryFileReader(Path historyFile) throws IOException { fsdis = fs.open(historyFile); reader = new TFile.Reader(fsdis, fs.getFileStatus(historyFile).getLen(), getConfig()); reset(); } public boolean hasNext() { return !scanner.atEnd(); } public Entry next() throws IOException { TFile.Reader.Scanner.Entry entry = scanner.entry(); DataInputStream dis = entry.getKeyStream(); HistoryDataKey key = new HistoryDataKey(); key.readFields(dis); dis = entry.getValueStream(); byte[] value = new byte[entry.getValueLength()]; dis.read(value); scanner.advance(); return new Entry(key, value); } public void reset() 
throws IOException { IOUtils.cleanup(LOG, scanner); scanner = reader.createScanner(); } public void close() { IOUtils.cleanup(LOG, scanner, reader, fsdis); } } private class HistoryFileWriter { private FSDataOutputStream fsdos; private TFile.Writer writer; public HistoryFileWriter(Path historyFile) throws IOException { if (fs.exists(historyFile)) { fsdos = fs.append(historyFile); } else { fsdos = fs.create(historyFile); } try { fs.setPermission(historyFile, HISTORY_FILE_UMASK); writer = new TFile.Writer(fsdos, MIN_BLOCK_SIZE, getConfig().get( YarnConfiguration.FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE, YarnConfiguration.DEFAULT_FS_APPLICATION_HISTORY_STORE_COMPRESSION_TYPE), null, getConfig()); } catch (IOException e) { IOUtils.cleanup(LOG, fsdos); throw e; } } public synchronized void close() { IOUtils.cleanup(LOG, writer, fsdos); } public synchronized void writeHistoryData(HistoryDataKey key, byte[] value) throws IOException { DataOutputStream dos = null; try { dos = writer.prepareAppendKey(-1); key.write(dos); } finally { IOUtils.cleanup(LOG, dos); } try { dos = writer.prepareAppendValue(value.length); dos.write(value); } finally { IOUtils.cleanup(LOG, dos); } } } private static class HistoryDataKey implements Writable { private String id; private String suffix; public HistoryDataKey() { this(null, null); } public HistoryDataKey(String id, String suffix) { this.id = id; this.suffix = suffix; } @Override public void write(DataOutput out) throws IOException { out.writeUTF(id); out.writeUTF(suffix); } @Override public void readFields(DataInput in) throws IOException { id = in.readUTF(); suffix = in.readUTF(); } } }
/*
 * ATime.java
 * 1.0.0  Sep 30, 2014  Leo Hinterlang
 */
package com.fidelis.valface;

import java.util.Calendar;
import java.util.Date;

/**
 * An immutable time-of-day value with nanosecond precision.
 * <p>
 * Instances are obtained through the static factories
 * {@link #of(int, int, int, int)}, {@link #parse(String)}, {@link #now()},
 * {@link #from(Date)} and {@link #from(Calendar)}; all {@code with*},
 * {@code plus*} and {@code minus*} operations return a new instance and never
 * mutate the receiver.
 *
 * @version 1.0.0
 * @author Leo Hinterlang
 */
public class ATime implements Comparable<ATime> {
    private static final int HOURS_DAY = 24;
    private static final int MINUTES_HOUR = 60;
    private static final int SECONDS_MINUTE = 60;
    private static final long NANOS_SECOND = 1000000000L;
    private static final int MINUTES_DAY = MINUTES_HOUR * HOURS_DAY;
    private static final int SECONDS_DAY = SECONDS_MINUTE * MINUTES_DAY;
    private static final long NANOS_DAY = NANOS_SECOND * SECONDS_DAY;
    private static final int SECONDS_HOUR = SECONDS_MINUTE * MINUTES_HOUR;
    private static final long NANOS_HOUR = NANOS_SECOND * SECONDS_HOUR;
    private static final long NANOS_MINUTE = NANOS_SECOND * SECONDS_MINUTE;
    private static final int NANOS_MILLI = 1000000;
    private static final int NANOS_MICRO = 1000;

    private int hour;    // 0..23
    private int minute;  // 0..59
    private int second;  // 0..59
    private int nano;    // 0..999,999,999

    // Test hook: when non-null, now() returns this fixed value.
    private static ATime nowTest;

    /**
     * Creates a new {@code ATime} object.
     *
     * @param hour the hour value from 0 to 23
     * @param minute the minute value from 0 to 59
     * @param second the second value from 0 to 59
     * @param nanoOfSecond the nanosecond value from 0 to 999,999,999
     */
    private ATime (int hour, int minute, int second, int nanoOfSecond) {
        this.hour = hour;
        this.minute = minute;
        this.second = second;
        this.nano = nanoOfSecond;
    }

    /**
     * Returns the hour-of-day field, in the range 0 to 23.
     *
     * @return the hour of the day
     */
    public int getHour () {
        return hour;
    }

    /**
     * Returns the minute-of-hour field, in the range 0 to 59.
     *
     * @return the minute of the hour
     */
    public int getMinute () {
        return minute;
    }

    /**
     * Returns the second-of-minute field, in the range 0 to 59.
     *
     * @return the second of the minute
     */
    public int getSecond () {
        return second;
    }

    /**
     * Returns the nano-of-second field, in the range 0 to 999,999,999.
     *
     * @return the nanosecond of the second
     */
    public int getNano () {
        return nano;
    }

    /**
     * Returns a new ATime with the current time of day.
     * If the {@link #nowTestSet(ATime) nowTestSet} method has set a test value,
     * that ATime is returned. Otherwise, the current time is returned.
     *
     * @return a new ATime
     * @see #nowTestSet(ATime)
     * @see #nowTestClear()
     */
    public static ATime now () {
        if (nowTest != null) {
            return nowTest;
        }
        Calendar cal = Calendar.getInstance();
        return from(cal);
    }

    /**
     * Sets a test value to be returned by the {@link #now() now} method.
     * Use {@link #nowTestClear() nowTestClear} to restore the normal use of
     * the now method.
     *
     * @param now the test value to set
     * @see #now()
     * @see #nowTestClear()
     */
    public static void nowTestSet (ATime now) {
        nowTest = now;
    }

    /**
     * Clears any test value set for the ATime returned by the
     * {@link #now() now} method, restoring its normal behavior.
     *
     * @see #now()
     * @see #nowTestSet(ATime)
     */
    public static void nowTestClear () {
        nowTest = null;
    }

    /**
     * Returns a new ATime constructed from a time specification string.
     * The specifier takes the form {@code HH[[:]mm[[:]ss[.nano]]]} where
     * {@code HH} is the hour 00-23, {@code mm} the minute 00-59, {@code ss}
     * the second 00-59, and {@code nano} a fraction of up to 9 digits.
     * Unspecified fields default to zero. The colons may be omitted as long
     * as {@code HH}, {@code mm} and {@code ss} are 2 digits each; if a nano
     * field is included, the decimal point must be included as well.
     * <p>
     * Examples: {@code 10}, {@code 1030}, {@code 10:30}, {@code 10:30:45},
     * {@code 10:30:45.123456789}, {@code 103045.123456789}.
     *
     * @param timeString the time specifier
     * @return a new ATime
     * @throws IllegalArgumentException if the time specification is invalid
     */
    public static ATime parse (String timeString)
            throws IllegalArgumentException {
        boolean status = true;
        String zero = "0";
        String h = zero;
        String m = zero;
        String s = null;
        String n = null;
        if (timeString.contains(":")) {
            // Colon-separated form: exactly 2 or 3 parts are valid.
            String[] parts = timeString.split("\\:");
            int len = parts.length;
            if (len >= 2) {
                h = parts[0];
                m = parts[1];
            }
            if (len == 3) {
                s = parts[2];
            }
            if (2 > len || len > 3) {
                status = false;
            }
        } else {
            // Packed form: fixed 2-digit fields, remainder is seconds(.nano).
            int len = timeString.length();
            if (len >= 2) {
                h = timeString.substring(0, 2);
            }
            if (len >= 4) {
                m = timeString.substring(2, 4);
            }
            if (len >= 6) {
                s = timeString.substring(4);
            }
        }
        if (s == null) {
            s = zero;
            n = zero;
        } else {
            // Split the optional fractional part off the seconds field.
            int index = s.indexOf(".");
            if (index > 0) {
                n = s.substring(index + 1);
                s = s.substring(0, index);
            } else {
                n = zero;
            }
        }
        int hour = 0;
        int minute = 0;
        int second = 0;
        int nano = 0;
        if (status) {
            try {
                hour = Integer.valueOf(h);
                minute = Integer.valueOf(m);
                second = Integer.valueOf(s);
                nano = fraction9(n);
            } catch (NumberFormatException ex) {
                status = false;
            }
        }
        if (status) {
            checkHour(hour);
            checkMinute(minute);
            checkSecond(second);
            checkNano(nano);
            return new ATime(hour, minute, second, nano);
        }
        throw new IllegalArgumentException(
            "Invalid time String: " + timeString);
    }

    // Converts a fractional-second string of 1-9 digits to nanoseconds by
    // right-padding with zeros (e.g. "123" -> 123,000,000).
    private static int fraction9 (String number) {
        if (! number.matches("\\d{1,9}")) {
            throw new NumberFormatException("Invalid number: " + number);
        }
        int val = Integer.valueOf(number);
        int len = number.length();
        while (len < 9) {
            val *= 10;
            ++len;
        }
        return val;
    }

    /**
     * Formats the time based on pattern keywords, each replaced by the
     * indicated value:
     * <ul>
     * <li>Hour / hour - 24/12 hour value, no leading zero.
     * <li>HH / hh - 2 digit hour in 24/12 hour format.
     * <li>Minute - minute value, no leading zero; minute / mm - 2 digits.
     * <li>Second - second value, no leading zero; second / ss - 2 digits.
     * <li>Nano - nanosecond value, no leading zeroes; nano - 9 digits.
     * <li>micro - 6 digit microsecond; milli - 3 digit millisecond.
     * <li>nn1 / nn2 / nn3 - 3 digit 1st/2nd/3rd group of the nanosecond.
     * <li>AMPM / ampm - upper/lowercase AM-PM indicator.
     * </ul>
     * Other characters are copied without translation; literal text that
     * could match a keyword may be protected in angle brackets, as in
     * {@code <the second time>}.
     * <p>
     * Example: {@code HH:mm:ss.nano => 20:30:05.123456789}
     *
     * @param pattern the format pattern, or null for {@link #toString()}
     * @return a formatted string
     */
    public String format (String pattern) {
        // Null pattern returns toString.
        if (pattern == null) {
            return toString();
        }
        // Setup StringBuilder.
        StringBuilder sb = new StringBuilder(40);
        // Pattern character loop.
        boolean inText = false;
        for (int n = 0; n < pattern.length(); n++) {
            char c = pattern.charAt(n);
            // Already in angle bracket text.
            if (inText) {
                if (c == '>') {
                    inText = false;     // end of protected text
                } else {
                    sb.append(c);       // copy protected char verbatim
                }
                continue;
            }
            // Char is not a letter.
            if (! Character.isLetter(c)) {
                if (c == '<') {
                    inText = true;      // start of protected text
                    continue;
                }
                sb.append(c);
            }
            // Char is a letter: try to match a keyword starting here.
            else {
                String kw = pattern.substring(n);
                int len = keywordSubstitute(kw, sb);
                if (len != 0) {
                    n += len - 1;       // skip over the matched keyword
                } else {
                    sb.append(c);       // not a keyword, copy the char
                }
            }
        }
        return sb.toString();
    }

    // Appends the substitution for the keyword at the start of kw, if any,
    // and returns the keyword's length (0 if no keyword matched).
    int keywordSubstitute (String kw, StringBuilder sb) {
        String[] keywords = {
            "Hour", "hour", "HH", "hh", "Minute", "minute", "mm",
            "Second", "second", "ss", "Nano", "nano", "milli", "micro",
            "AMPM", "ampm", "nn1", "nn2", "nn3"
        };  // the order of these must not be changed.
        int len = 0;
        int m;
        for (m = 0; m < keywords.length; m++) {
            if (kw.startsWith(keywords[m])) {
                len = keywords[m].length();
                break;
            }
        }
        // Keyword match found.
        if (len != 0) {
            switch (m) {
                case 0:     // Hour
                    sb.append(String.format("%d", hour));
                    break;
                case 1:     // hour
                    // NOTE(review): renders hour 0 and 12 as "0" on the
                    // 12-hour clock (range 0-11), not the conventional 12.
                    sb.append(String.format("%d", hour % 12));
                    break;
                case 2:     // HH
                    sb.append(String.format("%02d", hour));
                    break;
                case 3:     // hh
                    sb.append(String.format("%02d", hour % 12));
                    break;
                case 4:     // Minute
                    sb.append(String.format("%d", minute));
                    break;
                case 5:     // minute
                case 6:     // mm
                    sb.append(String.format("%02d", minute));
                    break;
                case 7:     // Second
                    sb.append(String.format("%d", second));
                    break;
                case 8:     // second
                case 9:     // ss
                    sb.append(String.format("%02d", second));
                    break;
                case 10:    // Nano
                    sb.append(String.format("%d", nano));
                    break;
                case 11:    // nano
                    sb.append(String.format("%09d", nano));
                    break;
                case 12:    // milli
                    sb.append(String.format("%03d", nano / NANOS_MILLI));
                    break;
                case 13:    // micro
                    sb.append(String.format("%06d", nano / NANOS_MICRO));
                    break;
                case 14:    // AMPM
                    sb.append(hour < 12 ? "AM" : "PM");
                    break;
                case 15:    // ampm
                    sb.append(hour < 12 ? "am" : "pm");
                    break;
                case 16:    // nn1
                    sb.append(String.format("%09d", nano).substring(0, 3));
                    break;
                case 17:    // nn2
                    sb.append(String.format("%09d", nano).substring(3, 6));
                    break;
                case 18:    // nn3
                    sb.append(String.format("%09d", nano).substring(6, 9));
                    break;
            }
        }
        return len;
    }

    /**
     * Returns a new {@code ATime} with the hour field modified.
     *
     * @param hour the new hour value: 0 to 23
     * @return a new ATime
     * @throws IllegalArgumentException if the hour value is invalid
     */
    public ATime withHour (int hour) throws IllegalArgumentException {
        checkHour(hour);
        return new ATime(hour, minute, second, nano);
    }

    /**
     * Returns a new {@code ATime} with the minute field modified.
     *
     * @param minute the new minute value: 0 to 59
     * @return a new ATime
     * @throws IllegalArgumentException if the minute value is invalid
     */
    public ATime withMinute (int minute) throws IllegalArgumentException {
        checkMinute(minute);
        return new ATime(hour, minute, second, nano);
    }

    /**
     * Returns a new {@code ATime} with the second field modified.
     *
     * @param second the new second value: 0 to 59
     * @return a new ATime
     * @throws IllegalArgumentException if the second value is invalid
     */
    public ATime withSecond (int second) throws IllegalArgumentException {
        checkSecond(second);
        return new ATime(hour, minute, second, nano);
    }

    /**
     * Returns a new {@code ATime} with the nanosecond field modified.
     *
     * @param nanoOfSecond the new nanosecond value: 0 to 999,999,999
     * @return a new ATime
     * @throws IllegalArgumentException if the nanosecond value is invalid
     */
    public ATime withNano (int nanoOfSecond) throws IllegalArgumentException {
        checkNano(nanoOfSecond);
        return new ATime(hour, minute, second, nanoOfSecond);
    }

    /**
     * Returns a new {@code ATime} constructed from the specified fields.
     *
     * @param hour the hour of day value: 0 to 23
     * @param minute the minute of hour value: 0 to 59
     * @param second the second of minute value: 0 to 59
     * @param nanoOfSecond the nanosecond of second value: 0 to 999,999,999
     * @return a new ATime
     * @throws IllegalArgumentException if the value of any field is invalid
     */
    public static ATime of (int hour, int minute, int second, int nanoOfSecond)
            throws IllegalArgumentException {
        checkHour(hour);
        checkMinute(minute);
        checkSecond(second);
        checkNano(nanoOfSecond);
        return new ATime(hour, minute, second, nanoOfSecond);
    }

    /**
     * Returns a new {@code ATime} constructed from the specified fields.
     * The nanosecond field is set to zero.
     *
     * @param hour the hour of day value: 0 to 23
     * @param minute the minute of hour value: 0 to 59
     * @param second the second of minute value: 0 to 59
     * @return a new ATime
     * @throws IllegalArgumentException if the value of any field is invalid
     */
    public static ATime of (int hour, int minute, int second)
            throws IllegalArgumentException {
        return of(hour, minute, second, 0);
    }

    /**
     * Returns a new {@code ATime} constructed from the specified fields.
     * The second and nanosecond fields are set to zero.
     *
     * @param hour the hour of day value: 0 to 23
     * @param minute the minute of hour value: 0 to 59
     * @return a new ATime
     * @throws IllegalArgumentException if the value of any field is invalid
     */
    public static ATime of (int hour, int minute)
            throws IllegalArgumentException {
        return of(hour, minute, 0, 0);
    }

    /**
     * Returns a new {@code ATime} from a nanosecond of day value.
     *
     * @param nanoOfDay the nano of day value:
     *            0 to 24 * 60 * 60 * 1,000,000,000 - 1
     * @return a new ATime
     * @throws IllegalArgumentException if the value is out of range
     */
    public static ATime ofNanoOfDay (long nanoOfDay)
            throws IllegalArgumentException {
        checkNanoOfDay(nanoOfDay);
        int hour = (int) (nanoOfDay / NANOS_HOUR);
        nanoOfDay -= hour * NANOS_HOUR;
        int minute = (int) (nanoOfDay / NANOS_MINUTE);
        nanoOfDay -= minute * NANOS_MINUTE;
        int second = (int) (nanoOfDay / NANOS_SECOND);
        nanoOfDay -= second * NANOS_SECOND;
        return new ATime(hour, minute, second, (int) nanoOfDay);
    }

    /**
     * Returns a new {@code ATime} from a second of day value.
     * The nanosecond field is set to zero.
     *
     * @param secondOfDay the second of day value: 0 to 24 * 60 * 60 - 1
     * @return a new ATime
     * @throws IllegalArgumentException if the value is out of range
     */
    public static ATime ofSecondOfDay (long secondOfDay)
            throws IllegalArgumentException {
        checkSecondOfDay(secondOfDay);
        int hour = (int) secondOfDay / SECONDS_HOUR;
        secondOfDay -= hour * SECONDS_HOUR;
        int minute = (int) secondOfDay / SECONDS_MINUTE;
        secondOfDay -= minute * SECONDS_MINUTE;
        return new ATime(hour, minute, (int) secondOfDay, 0);
    }

    /**
     * Returns a nanosecond of day value for this ATime.
     *
     * @return the nanosecond of day value for this ATime
     */
    public long toNanoOfDay () {
        return hour * NANOS_HOUR + minute * NANOS_MINUTE
                + second * NANOS_SECOND + nano;
    }

    /**
     * Returns a second of day value for this ATime.
     *
     * @return the second of day value for this ATime.
     */
    public int toSecondOfDay () {
        return hour * SECONDS_HOUR + minute * SECONDS_MINUTE + second;
    }

    /**
     * Returns this ATime as a {@code java.util.Date}.
     * The nanosecond is truncated to millisecond accuracy.
     *
     * @return this ATime as a java.util.Date
     */
    public Date toDate () {
        return toCalendar().getTime();
    }

    /**
     * Returns this ATime as a {@code java.util.Calendar}.
     * The nanosecond is truncated to millisecond accuracy.
     *
     * @return this ATime as a java.util.Calendar
     */
    public Calendar toCalendar () {
        Calendar cal = Calendar.getInstance();
        // NOTE(review): the date part is pinned to year 0, month 0, day 0;
        // Calendar will normalize day 0 to the last day of the previous
        // month. Only the time-of-day fields are meaningful here.
        cal.set(0, 0, 0, hour, minute, second);
        cal.set(Calendar.MILLISECOND, nano / NANOS_MILLI);
        return cal;
    }

    /**
     * Returns a new ATime from a {@code java.util.Date}.
     * The date's millisecond value is converted for the nanosecond field.
     *
     * @param date the java.util.Date object
     * @return a new ATime
     */
    public static ATime from (Date date) {
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        return from(cal);
    }

    /**
     * Returns a new ATime from a {@code java.util.Calendar}.
     * The calendar's millisecond value is converted for the nanosecond field.
     *
     * @param calendar the java.util.Calendar object
     * @return a new ATime
     */
    public static ATime from (Calendar calendar) {
        int hour = calendar.get(Calendar.HOUR_OF_DAY);
        int minute = calendar.get(Calendar.MINUTE);
        int second = calendar.get(Calendar.SECOND);
        int milli = calendar.get(Calendar.MILLISECOND);
        return new ATime(hour, minute, second, milli * NANOS_MILLI);
    }

    /**
     * Compares this ATime with another ATime.
     * Returns a value less than, greater than, or equal to zero as this
     * ATime is less than, greater than, or equal to the other ATime.
     *
     * @param other the other ATime to compare to
     * @return a value less than, greater than, or equal to zero
     * @see java.lang.Comparable#compareTo(java.lang.Object)
     */
    @Override
    public int compareTo (ATime other) {
        // Compare field by field, most significant first.
        int cmp = 0;
        if (this.hour < other.hour) {
            --cmp;
        } else if (this.hour > other.hour) {
            ++cmp;
        }
        if (cmp != 0) {
            return cmp;
        }
        if (this.minute < other.minute) {
            --cmp;
        } else if (this.minute > other.minute) {
            ++cmp;
        }
        if (cmp != 0) {
            return cmp;
        }
        if (this.second < other.second) {
            --cmp;
        } else if (this.second > other.second) {
            ++cmp;
        }
        if (cmp != 0) {
            return cmp;
        }
        if (this.nano < other.nano) {
            --cmp;
        } else if (this.nano > other.nano) {
            ++cmp;
        }
        return cmp;
    }

    /**
     * Returns true if this ATime is equal to another ATime.
     *
     * @param other the other ATime to compare
     * @return true if this ATime equals the other ATime
     */
    public boolean equals (ATime other) {
        return compareTo(other) == 0;
    }

    /**
     * Indicates whether some other object is an {@code ATime} equal to this
     * one. Added so equality is consistent with {@link Object#equals(Object)}
     * and hash-based collections work; the class previously declared only the
     * {@code equals(ATime)} overload, which does not override
     * {@code Object.equals}.
     *
     * @param obj the object to compare with
     * @return true if {@code obj} is an equal {@code ATime}
     */
    @Override
    public boolean equals (Object obj) {
        if (this == obj) {
            return true;
        }
        if (! (obj instanceof ATime)) {
            return false;
        }
        return compareTo((ATime) obj) == 0;
    }

    /**
     * Returns a hash code consistent with {@link #equals(Object)}.
     *
     * @return the hash code for this ATime
     */
    @Override
    public int hashCode () {
        return ((hour * 31 + minute) * 31 + second) * 31 + nano;
    }

    /**
     * Returns true if this ATime is before another ATime.
     *
     * @param other the other ATime to compare
     * @return true if this ATime is before the other ATime
     */
    public boolean isBefore (ATime other) {
        return compareTo(other) < 0;
    }

    /**
     * Returns true if this ATime is after another ATime.
     *
     * @param other the other ATime to compare
     * @return true if this ATime is after the other ATime
     */
    public boolean isAfter (ATime other) {
        return compareTo(other) > 0;
    }

    /**
     * Returns a new ATime with the number of hours added to this ATime,
     * wrapping around midnight (negative amounts are allowed).
     *
     * @param hours the number of hours to add
     * @return a new ATime
     */
    public ATime plusHours (long hours) {
        int newHour = ((int) (hours % HOURS_DAY) + hour + HOURS_DAY)
                % HOURS_DAY;
        return new ATime(newHour, minute, second, nano);
    }

    /**
     * Returns a new ATime with the number of minutes added to this ATime,
     * wrapping around midnight (negative amounts are allowed).
     *
     * @param minutes the number of minutes to add
     * @return a new ATime
     */
    public ATime plusMinutes (long minutes) {
        int minTime = hour * MINUTES_HOUR + minute;
        int newMinTime = ((int) (minutes % MINUTES_DAY) + minTime
                + MINUTES_DAY) % MINUTES_DAY;
        int newHour = (int) (newMinTime / MINUTES_HOUR) % HOURS_DAY;
        int newMinute = (int) (newMinTime % MINUTES_HOUR);
        return new ATime(newHour, newMinute, second, nano);
    }

    /**
     * Returns a new ATime with the number of seconds added to this ATime,
     * wrapping around midnight (negative amounts are allowed).
     *
     * @param seconds the number of seconds to add
     * @return a new ATime
     */
    public ATime plusSeconds (long seconds) {
        int secTime = toSecondOfDay();
        int newSecTime = ((int) (seconds % SECONDS_DAY) + secTime
                + SECONDS_DAY) % SECONDS_DAY;
        int newHour = (int) (newSecTime / SECONDS_HOUR) % HOURS_DAY;
        int newMinute = (int) (newSecTime / SECONDS_MINUTE) % MINUTES_HOUR;
        int newSecond = (int) (newSecTime % SECONDS_MINUTE);
        return new ATime(newHour, newMinute, newSecond, nano);
    }

    /**
     * Returns a new ATime with the number of nanoseconds added to this ATime,
     * wrapping around midnight (negative amounts are allowed).
     *
     * @param nanos the number of nanoseconds to add
     * @return a new ATime
     */
    public ATime plusNanos (long nanos) {
        long nanoTime = toNanoOfDay();
        long newNanoTime = ((nanos % NANOS_DAY) + nanoTime + NANOS_DAY)
                % NANOS_DAY;
        int newHour = (int) (newNanoTime / NANOS_HOUR) % HOURS_DAY;
        int newMinute = (int) (newNanoTime / NANOS_MINUTE) % MINUTES_HOUR;
        int newSecond = (int) (newNanoTime / NANOS_SECOND) % SECONDS_MINUTE;
        int newNano = (int) (newNanoTime % NANOS_SECOND);
        return new ATime(newHour, newMinute, newSecond, newNano);
    }

    /**
     * Returns a new ATime with the number of hours subtracted from this ATime.
     *
     * @param hours the number of hours to subtract
     * @return a new ATime
     */
    public ATime minusHours (long hours) {
        return plusHours(-hours);
    }

    /**
     * Returns a new ATime with the number of minutes subtracted from this
     * ATime.
     *
     * @param minutes the number of minutes to subtract
     * @return a new ATime
     */
    public ATime minusMinutes (long minutes) {
        return plusMinutes(-minutes);
    }

    /**
     * Returns a new ATime with the number of seconds subtracted from this
     * ATime.
     *
     * @param seconds the number of seconds to subtract
     * @return a new ATime
     */
    public ATime minusSeconds (long seconds) {
        return plusSeconds(-seconds);
    }

    /**
     * Returns a new ATime with the number of nanoseconds subtracted from this
     * ATime.
     *
     * @param nanos the number of nanoseconds to subtract
     * @return a new ATime
     */
    public ATime minusNanos (long nanos) {
        return plusNanos(-nanos);
    }

    /**
     * Returns {@code HH:mm}, extended with {@code :ss} when the second or
     * nanosecond is non-zero, and with the shortest exact fraction
     * (3, 6 or 9 digits) when the nanosecond is non-zero.
     *
     * @return the string form of this ATime
     */
    public String toString () {
        StringBuilder sb = new StringBuilder(20);
        sb.append(String.format("%02d:%02d", hour, minute));
        if (second != 0 || nano != 0) {
            sb.append(String.format(":%02d", second));
        }
        if (nano != 0) {
            if ((nano % 1000) != 0) {
                sb.append(String.format(".%09d", nano));
            } else if ((nano % 1000000) != 0) {
                sb.append(String.format(".%06d", nano / 1000));
            } else {
                sb.append(String.format(".%03d", nano / 1000000));
            }
        }
        return sb.toString();
    }

    private static void checkHour (int hour) throws IllegalArgumentException {
        if (0 > hour || hour >= HOURS_DAY) {
            throw new IllegalArgumentException(
                "Invalid hour specified: " + hour + " (0 .. 23)");
        }
    }

    private static void checkMinute (int minute)
            throws IllegalArgumentException {
        if (0 > minute || minute >= MINUTES_HOUR) {
            throw new IllegalArgumentException(
                "Invalid minute specified: " + minute + " (0 .. 59)");
        }
    }

    private static void checkSecond (int second)
            throws IllegalArgumentException {
        if (0 > second || second >= SECONDS_MINUTE) {
            // BUG FIX: error message previously read "specifield".
            throw new IllegalArgumentException(
                "Invalid second specified: " + second + " (0 .. 59)");
        }
    }

    private static void checkNano (int nano) throws IllegalArgumentException {
        if (0 > nano || nano >= NANOS_SECOND) {
            throw new IllegalArgumentException(
                "Invalid nanosecond specified: " + nano);
        }
    }

    private static void checkSecondOfDay (long secondOfDay)
            throws IllegalArgumentException {
        if (0 > secondOfDay || secondOfDay >= SECONDS_DAY) {
            throw new IllegalArgumentException(
                "Invalid second-of-day specified: " + secondOfDay);
        }
    }

    private static void checkNanoOfDay (long nanoOfDay)
            throws IllegalArgumentException {
        if (0 > nanoOfDay || nanoOfDay >= NANOS_DAY) {
            throw new IllegalArgumentException(
                "Invalid nano-of-day specified: " + nanoOfDay);
        }
    }
}
/*
 * Copyright 2009-2010 WSO2, Inc. (http://wso2.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.developerstudio.eclipse.esb.impl;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.w3c.dom.Element;
import org.wso2.developerstudio.eclipse.esb.AbstractProxySequenceConfiguration;
import org.wso2.developerstudio.eclipse.esb.EsbPackage;
import org.wso2.developerstudio.eclipse.esb.Mediator;
import org.wso2.developerstudio.eclipse.esb.MediatorSequence;
import org.wso2.developerstudio.eclipse.esb.ProxySequenceType;
import org.wso2.developerstudio.eclipse.esb.RegistryKeyProperty;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Abstract Proxy Sequence Configuration</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link org.wso2.developerstudio.eclipse.esb.impl.AbstractProxySequenceConfigurationImpl#getSequenceType <em>Sequence Type</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.esb.impl.AbstractProxySequenceConfigurationImpl#getInlineSequence <em>Inline Sequence</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.esb.impl.AbstractProxySequenceConfigurationImpl#getSequenceKey <em>Sequence Key</em>}</li>
 *   <li>{@link org.wso2.developerstudio.eclipse.esb.impl.AbstractProxySequenceConfigurationImpl#getSequenceName <em>Sequence Name</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public abstract class AbstractProxySequenceConfigurationImpl extends
        ModelObjectImpl implements AbstractProxySequenceConfiguration {
    /**
     * The default value of the '{@link #getSequenceType() <em>Sequence Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceType()
     * @generated
     * @ordered
     */
    protected static final ProxySequenceType SEQUENCE_TYPE_EDEFAULT = ProxySequenceType.ANONYMOUS;

    /**
     * The cached value of the '{@link #getSequenceType() <em>Sequence Type</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceType()
     * @generated
     * @ordered
     */
    protected ProxySequenceType sequenceType = SEQUENCE_TYPE_EDEFAULT;

    /**
     * The cached value of the '{@link #getInlineSequence() <em>Inline Sequence</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getInlineSequence()
     * @generated
     * @ordered
     */
    protected MediatorSequence inlineSequence;

    /**
     * The cached value of the '{@link #getSequenceKey() <em>Sequence Key</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceKey()
     * @generated
     * @ordered
     */
    protected RegistryKeyProperty sequenceKey;

    /**
     * The default value of the '{@link #getSequenceName() <em>Sequence Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceName()
     * @generated
     * @ordered
     */
    protected static final String SEQUENCE_NAME_EDEFAULT = "sequence_name";

    /**
     * The cached value of the '{@link #getSequenceName() <em>Sequence Name</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getSequenceName()
     * @generated
     * @ordered
     */
    protected String sequenceName = SEQUENCE_NAME_EDEFAULT;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected AbstractProxySequenceConfigurationImpl() {
        super();
    }

    /**
     * Utility method for loading a generic proxy sequence configuration.
     *
     * @param contentElem
     *            {@link Element} which hosts the sequence configuration.
     * @param localName
     *            local name of the child {@link Element} or the attribute
     *            within the content element which holds the sequence
     *            configuration.
     * @throws Exception
     *             if an error occurs while loading the sequence configuration.
     */
    protected void loadContent(Element contentElem, String localName)
            throws Exception {
        Element sequenceElem = getChildElement(contentElem, localName);
        if (null != sequenceElem) {
            // A child element is present: the sequence is defined inline.
            super.doLoad(sequenceElem);
            setSequenceType(ProxySequenceType.ANONYMOUS);
            MediatorSequence sequence = getEsbFactory().createMediatorSequence();
            sequence.load(sequenceElem);
            // Force the sequence to be anonymous.
            sequence.setAnonymous(true);
            setInlineSequence(sequence);
        } else if (contentElem.hasAttribute(localName)) {
            // Otherwise the attribute holds either a registry key or the
            // name of a sequence defined elsewhere.
            String sequenceReference = contentElem.getAttribute(localName);
            // TODO: This is not optimal, we could mis-interpret a registry
            // reference as a named reference and vice versa.
            if (isRegistryKey(sequenceReference)) {
                setSequenceType(ProxySequenceType.REGISTRY_REFERENCE);
                getSequenceKey().load(contentElem);
            } else {
                setSequenceType(ProxySequenceType.NAMED_REFERENCE);
                setSequenceName(sequenceReference);
            }
        }
    }

    /**
     * Utility method for loading a generic proxy sequence configuration.
     *
     * @param contentElem
     *            {@link Element} which should host the sequence configuration.
     * @param localName
     *            local name of the child {@link Element} or the attribute
     *            within the content element which should hold the sequence
     *            configuration.
     * @return child {@link Element} if one was created, null otherwise.
     * @throws Exception
     *             if an error occurs while saving the sequence configuration.
     */
    protected Element saveContent(Element contentElem, String localName)
            throws Exception {
        // Mirror of loadContent: which representation is written depends on
        // the current sequence type.
        switch (getSequenceType()) {
        case ANONYMOUS: {
            if (null != getInlineSequence()) {
                // Force the sequence to be anonymous.
                getInlineSequence().setAnonymous(true);
                Element sequenceElem = createChildElement(contentElem, localName);
                if (isOnErrorValuePresent()){
                    sequenceElem.setAttribute("onError", getInlineSequence().getOnError().getKeyValue());
                }
                for (Mediator mediator: getInlineSequence().getMediators()) {
                    mediator.save(sequenceElem);
                }
                return sequenceElem;
            }
            break;
        }
        case REGISTRY_REFERENCE: {
            getSequenceKey().save(contentElem);
            break;
        }
        case NAMED_REFERENCE: {
            contentElem.setAttribute(localName, getSequenceName());
            break;
        }
        default : {
            // Do nothing.
        }
        }
        return null;
    }

    // NOTE(review): only called after saveContent has null-checked
    // getInlineSequence(); whether getOnError() can return null here is not
    // verifiable from this file - confirm before reusing elsewhere.
    private boolean isOnErrorValuePresent() {
        String onErrorValue = getInlineSequence().getOnError().getKeyValue();
        return onErrorValue!=null && !onErrorValue.equals("");
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EClass eStaticClass() {
        return EsbPackage.Literals.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public ProxySequenceType getSequenceType() {
        return sequenceType;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setSequenceType(ProxySequenceType newSequenceType) {
        ProxySequenceType oldSequenceType = sequenceType;
        // A null argument resets the attribute to its EMF default.
        sequenceType = newSequenceType == null ? SEQUENCE_TYPE_EDEFAULT : newSequenceType;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_TYPE, oldSequenceType, sequenceType));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public MediatorSequence getInlineSequence() {
        return inlineSequence;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetInlineSequence(MediatorSequence newInlineSequence, NotificationChain msgs) {
        MediatorSequence oldInlineSequence = inlineSequence;
        inlineSequence = newInlineSequence;
        if (eNotificationRequired()) {
            ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE, oldInlineSequence, newInlineSequence);
            if (msgs == null) msgs = notification; else msgs.add(notification);
        }
        return msgs;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setInlineSequence(MediatorSequence newInlineSequence) {
        if (newInlineSequence != inlineSequence) {
            // Standard EMF containment bookkeeping: detach the old contained
            // sequence, attach the new one, then dispatch the batched
            // notifications.
            NotificationChain msgs = null;
            if (inlineSequence != null)
                msgs = ((InternalEObject)inlineSequence).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE, null, msgs);
            if (newInlineSequence != null)
                msgs = ((InternalEObject)newInlineSequence).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE, null, msgs);
            msgs = basicSetInlineSequence(newInlineSequence, msgs);
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE, newInlineSequence, newInlineSequence));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public RegistryKeyProperty getSequenceKey() {
        return sequenceKey;
    }

    /**
     * <!-- begin-user-doc -->
* <!-- end-user-doc --> * @generated */ public NotificationChain basicSetSequenceKey(RegistryKeyProperty newSequenceKey, NotificationChain msgs) { RegistryKeyProperty oldSequenceKey = sequenceKey; sequenceKey = newSequenceKey; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY, oldSequenceKey, newSequenceKey); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setSequenceKey(RegistryKeyProperty newSequenceKey) { if (newSequenceKey != sequenceKey) { NotificationChain msgs = null; if (sequenceKey != null) msgs = ((InternalEObject)sequenceKey).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY, null, msgs); if (newSequenceKey != null) msgs = ((InternalEObject)newSequenceKey).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY, null, msgs); msgs = basicSetSequenceKey(newSequenceKey, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY, newSequenceKey, newSequenceKey)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getSequenceName() { return sequenceName; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setSequenceName(String newSequenceName) { String oldSequenceName = sequenceName; sequenceName = newSequenceName; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_NAME, oldSequenceName, sequenceName)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain 
eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE: return basicSetInlineSequence(null, msgs); case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY: return basicSetSequenceKey(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_TYPE: return getSequenceType(); case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE: return getInlineSequence(); case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY: return getSequenceKey(); case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_NAME: return getSequenceName(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_TYPE: setSequenceType((ProxySequenceType)newValue); return; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE: setInlineSequence((MediatorSequence)newValue); return; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY: setSequenceKey((RegistryKeyProperty)newValue); return; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_NAME: setSequenceName((String)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_TYPE: setSequenceType(SEQUENCE_TYPE_EDEFAULT); return; case 
EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE: setInlineSequence((MediatorSequence)null); return; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY: setSequenceKey((RegistryKeyProperty)null); return; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_NAME: setSequenceName(SEQUENCE_NAME_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_TYPE: return sequenceType != SEQUENCE_TYPE_EDEFAULT; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__INLINE_SEQUENCE: return inlineSequence != null; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_KEY: return sequenceKey != null; case EsbPackage.ABSTRACT_PROXY_SEQUENCE_CONFIGURATION__SEQUENCE_NAME: return SEQUENCE_NAME_EDEFAULT == null ? sequenceName != null : !SEQUENCE_NAME_EDEFAULT.equals(sequenceName); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (sequenceType: "); result.append(sequenceType); result.append(", sequenceName: "); result.append(sequenceName); result.append(')'); return result.toString(); } } //AbstractProxySequenceConfigurationImpl
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.search;

import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.InternalTerms;
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.metrics.InternalMax;
import org.elasticsearch.search.aggregations.metrics.InternalMin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.test.ESIntegTestCase.SuiteScopeTestCase;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse;
import org.elasticsearch.xpack.core.search.action.SubmitAsyncSearchRequest;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;

/**
 * Integration tests for the async-search lifecycle: submit, poll partial
 * results, update keep-alive, delete, and cleanup of the backing index/task.
 * Suite-scoped: one randomized index is built once in
 * {@link #setupSuiteScopeCluster()} and shared by all tests, so the static
 * fields below capture the expected aggregation results for that index.
 */
@SuiteScopeTestCase
public class AsyncSearchActionIT extends AsyncSearchIntegTestCase {
    // Name and shard count of the shared randomized test index.
    private static String indexName;
    private static int numShards;

    // Number of distinct keyword terms indexed, and how often each occurs.
    private static int numKeywords;
    private static Map<String, AtomicInteger> keywordFreqs;
    // Extremes of the random "metric" field across all indexed docs.
    private static float maxMetric = Float.NEGATIVE_INFINITY;
    private static float minMetric = Float.POSITIVE_INFINITY;

    @Override
    public void setupSuiteScopeCluster() throws InterruptedException {
        indexName = "test-async";
        numShards = randomIntBetween(1, 20);
        int numDocs = randomIntBetween(100, 1000);
        createIndex(indexName, Settings.builder()
            .put("index.number_of_shards", numShards)
            .build());
        numKeywords = randomIntBetween(1, 100);
        keywordFreqs = new HashMap<>();
        Set<String> keywordSet = new HashSet<>();
        for (int i = 0; i < numKeywords; i++) {
            keywordSet.add(randomAlphaOfLengthBetween(10, 20));
        }
        // Random strings may collide; recompute the real distinct count.
        numKeywords = keywordSet.size();
        String[] keywords = keywordSet.toArray(String[]::new);
        List<IndexRequestBuilder> reqs = new ArrayList<>();
        for (int i = 0; i < numDocs; i++) {
            float metric = randomFloat();
            maxMetric = Math.max(metric, maxMetric);
            minMetric = Math.min(metric, minMetric);
            String keyword = keywords[randomIntBetween(0, numKeywords-1)];
            // Track per-keyword frequency so terms-agg doc counts can be verified.
            keywordFreqs.compute(keyword,
                (k, v) -> {
                    if (v == null) {
                        return new AtomicInteger(1);
                    }
                    v.incrementAndGet();
                    return v;
                });
            reqs.add(client().prepareIndex(indexName).setSource("terms", keyword, "metric", metric));
        }
        indexRandom(true, true, reqs);
    }

    // Verifies that partial min/max aggregations stay within the true bounds
    // while shards are still reporting, and match exactly once complete
    // (unless shard failures were injected).
    public void testMaxMinAggregation() throws Exception {
        int step = numShards > 2 ? randomIntBetween(2, numShards) : 2;
        int numFailures = randomBoolean() ? randomIntBetween(0, numShards) : 0;
        SearchSourceBuilder source = new SearchSourceBuilder()
            .aggregation(AggregationBuilders.min("min").field("metric"))
            .aggregation(AggregationBuilders.max("max").field("metric"));
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, source, numFailures, step)) {
            AsyncSearchResponse response = it.next();
            while (it.hasNext()) {
                response = it.next();
                assertNotNull(response.getSearchResponse());
                if (response.getSearchResponse().getSuccessfulShards() > 0) {
                    assertNotNull(response.getSearchResponse().getAggregations());
                    assertNotNull(response.getSearchResponse().getAggregations().get("max"));
                    assertNotNull(response.getSearchResponse().getAggregations().get("min"));
                    InternalMax max = response.getSearchResponse().getAggregations().get("max");
                    InternalMin min = response.getSearchResponse().getAggregations().get("min");
                    // Partial results can only under-shoot max / over-shoot min.
                    assertThat((float) min.getValue(), greaterThanOrEqualTo(minMetric));
                    assertThat((float) max.getValue(), lessThanOrEqualTo(maxMetric));
                }
            }
            if (numFailures == numShards) {
                assertNotNull(response.getFailure());
            } else {
                assertNotNull(response.getSearchResponse());
                assertNotNull(response.getSearchResponse().getAggregations());
                assertNotNull(response.getSearchResponse().getAggregations().get("max"));
                assertNotNull(response.getSearchResponse().getAggregations().get("min"));
                InternalMax max = response.getSearchResponse().getAggregations().get("max");
                InternalMin min = response.getSearchResponse().getAggregations().get("min");
                if (numFailures == 0) {
                    assertThat((float) min.getValue(), equalTo(minMetric));
                    assertThat((float) max.getValue(), equalTo(maxMetric));
                } else {
                    assertThat((float) min.getValue(), greaterThanOrEqualTo(minMetric));
                    assertThat((float) max.getValue(), lessThanOrEqualTo(maxMetric));
                }
            }
            deleteAsyncSearch(response.getId());
            ensureTaskRemoval(response.getId());
        }
    }

    // Same pattern as testMaxMinAggregation, but for a terms aggregation:
    // partial bucket counts never exceed the true per-keyword frequencies.
    public void testTermsAggregation() throws Exception {
        int step = numShards > 2 ? randomIntBetween(2, numShards) : 2;
        int numFailures = randomBoolean() ? randomIntBetween(0, numShards) : 0;
        SearchSourceBuilder source = new SearchSourceBuilder()
            .aggregation(AggregationBuilders.terms("terms").field("terms.keyword").size(numKeywords));
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, source, numFailures, step)) {
            AsyncSearchResponse response = it.next();
            while (it.hasNext()) {
                response = it.next();
                assertNotNull(response.getSearchResponse());
                if (response.getSearchResponse().getSuccessfulShards() > 0) {
                    assertNotNull(response.getSearchResponse().getAggregations());
                    assertNotNull(response.getSearchResponse().getAggregations().get("terms"));
                    StringTerms terms = response.getSearchResponse().getAggregations().get("terms");
                    assertThat(terms.getBuckets().size(), greaterThanOrEqualTo(0));
                    assertThat(terms.getBuckets().size(), lessThanOrEqualTo(numKeywords));
                    for (InternalTerms.Bucket<?> bucket : terms.getBuckets()) {
                        long count = keywordFreqs.getOrDefault(bucket.getKeyAsString(), new AtomicInteger(0)).get();
                        assertThat(bucket.getDocCount(), lessThanOrEqualTo(count));
                    }
                }
            }
            if (numFailures == numShards) {
                assertNotNull(response.getFailure());
            } else {
                assertNotNull(response.getSearchResponse());
                assertNotNull(response.getSearchResponse().getAggregations());
                assertNotNull(response.getSearchResponse().getAggregations().get("terms"));
                StringTerms terms = response.getSearchResponse().getAggregations().get("terms");
                assertThat(terms.getBuckets().size(), greaterThanOrEqualTo(0));
                assertThat(terms.getBuckets().size(), lessThanOrEqualTo(numKeywords));
                for (InternalTerms.Bucket<?> bucket : terms.getBuckets()) {
                    long count = keywordFreqs.getOrDefault(bucket.getKeyAsString(), new AtomicInteger(0)).get();
                    if (numFailures > 0) {
                        // With failed shards some docs are missing, so counts may be low.
                        assertThat(bucket.getDocCount(), lessThanOrEqualTo(count));
                    } else {
                        assertThat(bucket.getDocCount(), equalTo(count));
                    }
                }
            }
            deleteAsyncSearch(response.getId());
            ensureTaskRemoval(response.getId());
        }
    }

    // A completed async search must still be retrievable after the node that
    // ran the task restarts (result is served from the stored response).
    public void testRestartAfterCompletion() throws Exception {
        final AsyncSearchResponse initial;
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), 0, 2)) {
            initial = it.next();
        }
        ensureTaskCompletion(initial.getId());
        restartTaskNode(initial.getId(), indexName);
        AsyncSearchResponse response = getAsyncSearch(initial.getId());
        assertNotNull(response.getSearchResponse());
        assertFalse(response.isRunning());
        assertFalse(response.isPartial());
        deleteAsyncSearch(response.getId());
        ensureTaskRemoval(response.getId());
    }

    // Deleting a still-running async search must cancel its task.
    public void testDeleteCancelRunningTask() throws Exception {
        final AsyncSearchResponse initial;
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), randomBoolean() ? 1 : 0, 2)) {
            initial = it.next();
            deleteAsyncSearch(initial.getId());
            it.close();
            ensureTaskCompletion(initial.getId());
            ensureTaskRemoval(initial.getId());
        }
    }

    // Deleting a response must also remove the stored document from the
    // async-search results index.
    public void testDeleteCleanupIndex() throws Exception {
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), randomBoolean() ? 1 : 0, 2)) {
            AsyncSearchResponse response = it.next();
            deleteAsyncSearch(response.getId());
            it.close();
            ensureTaskCompletion(response.getId());
            ensureTaskRemoval(response.getId());
        }
    }

    // When every shard fails, the stored response reports the failure and
    // all per-shard failures, and can still be deleted cleanly.
    public void testCleanupOnFailure() throws Exception {
        final AsyncSearchResponse initial;
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), numShards, 2)) {
            initial = it.next();
        }
        ensureTaskCompletion(initial.getId());
        AsyncSearchResponse response = getAsyncSearch(initial.getId());
        assertFalse(response.isRunning());
        assertNotNull(response.getFailure());
        assertTrue(response.isPartial());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getShardFailures().length, equalTo(numShards));
        deleteAsyncSearch(initial.getId());
        ensureTaskRemoval(initial.getId());
    }

    // Fetching a malformed async-search id must fail with IllegalArgumentException.
    public void testInvalidId() throws Exception {
        try (SearchResponseIterator it =
                 assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), randomBoolean() ? 1 : 0, 2)) {
            AsyncSearchResponse response = it.next();
            ExecutionException exc = expectThrows(ExecutionException.class, () -> getAsyncSearch("invalid"));
            assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
            assertThat(exc.getMessage(), containsString("invalid id"));
            while (it.hasNext()) {
                response = it.next();
            }
            assertFalse(response.isRunning());
        }
    }

    // A wildcard that matches nothing completes with 0 shards; a concrete
    // missing index name fails with "no such index".
    public void testNoIndex() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest("invalid-*");
        request.setWaitForCompletionTimeout(TimeValue.timeValueMillis(1));
        AsyncSearchResponse response = submitAsyncSearch(request);
        assertNotNull(response.getSearchResponse());
        assertFalse(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(0));

        request = new SubmitAsyncSearchRequest("invalid");
        request.setWaitForCompletionTimeout(TimeValue.timeValueMillis(1));
        response = submitAsyncSearch(request);
        assertNull(response.getSearchResponse());
        assertNotNull(response.getFailure());
        assertFalse(response.isRunning());
        Exception exc = response.getFailure();
        assertThat(exc.getMessage(), containsString("error while executing search"));
        assertThat(exc.getCause().getMessage(), containsString("no such index"));
    }

    // Uses CancellingAggregationBuilder to keep the search blocked so the
    // response stays "running" with no shard progress until deleted.
    public void testCancellation() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(indexName);
        request.getSearchRequest().source(
            new SearchSourceBuilder().aggregation(new CancellingAggregationBuilder("test", randomLong()))
        );
        request.setWaitForCompletionTimeout(TimeValue.timeValueMillis(1));
        AsyncSearchResponse response = submitAsyncSearch(request);
        assertNotNull(response.getSearchResponse());
        assertTrue(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        response = getAsyncSearch(response.getId());
        assertNotNull(response.getSearchResponse());
        assertTrue(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        deleteAsyncSearch(response.getId());
        ensureTaskRemoval(response.getId());
    }

    // GET with a keep_alive extends the expiration of a running search; a
    // tiny keep_alive shortens it and triggers garbage collection.
    public void testUpdateRunningKeepAlive() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(indexName);
        request.getSearchRequest()
            .source(new SearchSourceBuilder().aggregation(new CancellingAggregationBuilder("test", randomLong())));
        long now = System.currentTimeMillis();
        request.setWaitForCompletionTimeout(TimeValue.timeValueMillis(1));
        AsyncSearchResponse response = submitAsyncSearch(request);
        assertNotNull(response.getSearchResponse());
        assertTrue(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));
        assertThat(response.getExpirationTime(), greaterThan(now));

        long expirationTime = response.getExpirationTime();

        response = getAsyncSearch(response.getId());
        assertNotNull(response.getSearchResponse());
        assertTrue(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        response = getAsyncSearch(response.getId(), TimeValue.timeValueDays(10));
        assertThat(response.getExpirationTime(), greaterThan(expirationTime));

        assertTrue(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        response = getAsyncSearch(response.getId(), TimeValue.timeValueMillis(1));
        assertThat(response.getExpirationTime(), lessThan(expirationTime));
        ensureTaskNotRunning(response.getId());
        ensureTaskRemoval(response.getId());
    }

    // Same keep-alive update semantics, but for a completed-and-stored response.
    public void testUpdateStoreKeepAlive() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(indexName);
        long now = System.currentTimeMillis();
        request.setWaitForCompletionTimeout(TimeValue.timeValueMinutes(10));
        request.setKeepOnCompletion(true);
        AsyncSearchResponse response = submitAsyncSearch(request);
        assertNotNull(response.getSearchResponse());
        assertFalse(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));
        assertThat(response.getExpirationTime(), greaterThan(now));

        long expirationTime = response.getExpirationTime();

        response = getAsyncSearch(response.getId());
        assertNotNull(response.getSearchResponse());
        assertFalse(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        response = getAsyncSearch(response.getId(), TimeValue.timeValueDays(10));
        assertThat(response.getExpirationTime(), greaterThan(expirationTime));

        assertFalse(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));

        response = getAsyncSearch(response.getId(), TimeValue.timeValueMillis(1));
        assertThat(response.getExpirationTime(), lessThan(expirationTime));
        ensureTaskNotRunning(response.getId());
        ensureTaskRemoval(response.getId());
    }

    // Deleting the backing results index makes stored responses 404, but new
    // submissions still work and are garbage-collected normally afterwards.
    public void testRemoveAsyncIndex() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(indexName);
        request.setWaitForCompletionTimeout(TimeValue.timeValueMinutes(10));
        request.setKeepOnCompletion(true);
        long now = System.currentTimeMillis();

        AsyncSearchResponse response = submitAsyncSearch(request);
        assertNotNull(response.getSearchResponse());
        assertFalse(response.isRunning());
        assertThat(response.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getSuccessfulShards(), equalTo(numShards));
        assertThat(response.getSearchResponse().getFailedShards(), equalTo(0));
        assertThat(response.getExpirationTime(), greaterThan(now));

        // remove the async search index
        client().admin().indices().prepareDelete(XPackPlugin.ASYNC_RESULTS_INDEX).get();

        Exception exc = expectThrows(Exception.class, () -> getAsyncSearch(response.getId()));
        Throwable cause = exc instanceof ExecutionException ?
            ExceptionsHelper.unwrapCause(exc.getCause()) : ExceptionsHelper.unwrapCause(exc);
        assertThat(ExceptionsHelper.status(cause).getStatus(), equalTo(404));

        SubmitAsyncSearchRequest newReq = new SubmitAsyncSearchRequest(indexName);
        newReq.getSearchRequest().source(
            new SearchSourceBuilder().aggregation(new CancellingAggregationBuilder("test", randomLong()))
        );
        newReq.setWaitForCompletionTimeout(TimeValue.timeValueMillis(1));
        AsyncSearchResponse newResp = submitAsyncSearch(newReq);
        assertNotNull(newResp.getSearchResponse());
        assertTrue(newResp.isRunning());
        assertThat(newResp.getSearchResponse().getTotalShards(), equalTo(numShards));
        assertThat(newResp.getSearchResponse().getSuccessfulShards(), equalTo(0));
        assertThat(newResp.getSearchResponse().getFailedShards(), equalTo(0));
        long expirationTime = newResp.getExpirationTime();

        // check garbage collection
        newResp = getAsyncSearch(newResp.getId(), TimeValue.timeValueMillis(1));
        assertThat(newResp.getExpirationTime(), lessThan(expirationTime));
        ensureTaskNotRunning(newResp.getId());
        ensureTaskRemoval(newResp.getId());
    }

    // A phase failure with no usable cause (AlreadyClosedException is ignored
    // by the coordinating node) still yields a stored partial failure response.
    public void testSearchPhaseFailureNoCause() throws Exception {
        SubmitAsyncSearchRequest request = new SubmitAsyncSearchRequest(indexName);
        request.setKeepOnCompletion(true);
        request.setWaitForCompletionTimeout(TimeValue.timeValueMinutes(10));
        request.getSearchRequest().allowPartialSearchResults(false);
        request.getSearchRequest()
            // AlreadyClosedException are ignored by the coordinating node
            .source(new SearchSourceBuilder().query(new ThrowingQueryBuilder(randomLong(), new AlreadyClosedException("boom"), 0)));
        AsyncSearchResponse response = submitAsyncSearch(request);
        assertFalse(response.isRunning());
        assertTrue(response.isPartial());
        assertThat(response.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
        assertNotNull(response.getFailure());
        ensureTaskNotRunning(response.getId());
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vfs.newvfs;

import com.intellij.openapi.application.AccessToken;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.DumbModePermission;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.DumbServiceImpl;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.openapi.vfs.ex.VirtualFileManagerEx;
import com.intellij.openapi.vfs.impl.local.FileWatcher;
import com.intellij.openapi.vfs.impl.local.LocalFileSystemImpl;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.openapi.vfs.newvfs.persistent.RefreshWorker;
import com.intellij.util.concurrency.Semaphore;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.*;
import java.util.concurrent.atomic.AtomicLong;

/**
 * A single VFS refresh session: a set of root files to scan ({@link #scan()})
 * plus the {@link VFileEvent}s produced, which are later applied under a
 * write action by {@link #fireEvents(boolean)}.
 *
 * Thread-safety: scanning happens on a refresh thread while {@link #cancel()}
 * may be called from another thread — hence the volatile worker/cancel flags.
 * The semaphore pairs {@link #launch()} with {@link #waitFor()}.
 *
 * @author max
 */
public class RefreshSessionImpl extends RefreshSession {
  private static final Logger LOG = Logger.getInstance(RefreshSession.class);

  // Monotonically increasing session id, unique per process.
  private static final AtomicLong ID_COUNTER = new AtomicLong(0);

  private final long myId = ID_COUNTER.incrementAndGet();
  private final boolean myIsAsync;
  private final boolean myIsRecursive;
  private final Runnable myFinishRunnable;
  private final ModalityState myModalityState;
  // Down on launch(), up when fireEvents() completes — see waitFor().
  private final Semaphore mySemaphore = new Semaphore();

  // Roots still to be scanned; swapped out wholesale by scan().
  private List<VirtualFile> myWorkQueue = new ArrayList<VirtualFile>();
  // Events collected by scan(), merged and consumed by fireEventsInWriteAction().
  private List<VFileEvent> myEvents = new ArrayList<VFileEvent>();
  private volatile boolean iHaveEventsToFire;
  // Current worker, exposed so cancel() can reach the in-flight scan.
  private volatile RefreshWorker myWorker = null;
  private volatile boolean myCancelled = false;
  private final DumbModePermission myDumbModePermission;
  private final Throwable myStartTrace;

  public RefreshSessionImpl(boolean async, boolean recursive, @Nullable Runnable finishRunnable) {
    this(async, recursive, finishRunnable, ModalityState.NON_MODAL);
  }

  public RefreshSessionImpl(boolean async, boolean recursive, @Nullable Runnable finishRunnable, @NotNull ModalityState modalityState) {
    myIsAsync = async;
    myIsRecursive = recursive;
    myFinishRunnable = finishRunnable;
    myModalityState = modalityState;
    LOG.assertTrue(modalityState == ModalityState.NON_MODAL || modalityState != ModalityState.any(), "Refresh session should have a specific modality");

    if (modalityState == ModalityState.NON_MODAL) {
      myDumbModePermission = null;
      myStartTrace = null;
    }
    else {
      // Modal sessions may start dumb mode; remember where they were created from.
      myDumbModePermission = DumbServiceImpl.getExplicitPermission(modalityState);
      myStartTrace = new Throwable(); // please report exceptions here to peter
    }
  }

  /** Creates a session that only fires pre-built events (no scanning). */
  public RefreshSessionImpl(@NotNull List<VFileEvent> events) {
    this(false, false, null, ModalityState.NON_MODAL);
    myEvents.addAll(events);
  }

  @Override
  public long getId() {
    return myId;
  }

  @Override
  public void addAllFiles(@NotNull Collection<? extends VirtualFile> files) {
    for (VirtualFile file : files) {
      if (file == null) {
        LOG.error("null passed among " + files);
      }
      else {
        myWorkQueue.add(file);
      }
    }
  }

  @Override
  public void addFile(@NotNull VirtualFile file) {
    myWorkQueue.add(file);
  }

  @Override
  public boolean isAsynchronous() {
    return myIsAsync;
  }

  @Override
  public void launch() {
    // Semaphore is released in fireEvents(); waitFor() blocks until then.
    mySemaphore.down();
    ((RefreshQueueImpl)RefreshQueue.getInstance()).execute(this);
  }

  /**
   * Scans the queued roots with RefreshWorker, accumulating generated events
   * into myEvents and recording whether there is anything to fire.
   */
  public void scan() {
    List<VirtualFile> workQueue = myWorkQueue;
    myWorkQueue = new ArrayList<VirtualFile>();
    boolean haveEventsToFire = myFinishRunnable != null || !myEvents.isEmpty();

    if (!workQueue.isEmpty()) {
      final LocalFileSystem fileSystem = LocalFileSystem.getInstance();
      final FileWatcher watcher;
      if (fileSystem instanceof LocalFileSystemImpl) {
        LocalFileSystemImpl fs = (LocalFileSystemImpl)fileSystem;
        fs.markSuspiciousFilesDirty(workQueue);
        watcher = fs.getFileWatcher();
      }
      else {
        watcher = null;
      }

      long t = 0;
      if (LOG.isDebugEnabled()) {
        LOG.debug("scanning " + workQueue);
        t = System.currentTimeMillis();
      }

      for (VirtualFile file : workQueue) {
        if (myCancelled) break;

        NewVirtualFile nvf = (NewVirtualFile)file;
        if (!myIsRecursive && (!myIsAsync || (watcher != null && !watcher.isWatched(nvf)))) {
          // we're unable to definitely refresh synchronously by means of file watcher.
          nvf.markDirty();
        }

        // Publish the worker before scanning so cancel() can interrupt it.
        RefreshWorker worker = myWorker = new RefreshWorker(nvf, myIsRecursive);
        worker.scan();
        List<VFileEvent> events = worker.getEvents();
        if (myEvents.addAll(events)) {
          haveEventsToFire = true;
        }
      }

      if (t != 0) {
        t = System.currentTimeMillis() - t;
        LOG.debug((myCancelled ? "cancelled, " : "done, ") + t + " ms, events " + myEvents);
      }
    }

    myWorker = null;
    iHaveEventsToFire = haveEventsToFire;
  }

  /** Requests cancellation: stops the loop in scan() and the current worker. */
  public void cancel() {
    myCancelled = true;

    RefreshWorker worker = myWorker;
    if (worker != null) {
      worker.cancel();
    }
  }

  /**
   * Applies the collected events under a write action (acquiring one if the
   * caller doesn't already hold it), honoring the dumb-mode permission, and
   * finally releases the launch semaphore.
   */
  public void fireEvents(final boolean hasWriteAction) {
    AccessToken token = myStartTrace == null ? null : DumbServiceImpl.forceDumbModeStartTrace(myStartTrace);
    try {
      if (!iHaveEventsToFire || ApplicationManager.getApplication().isDisposed()) return;

      Runnable runnable = new Runnable() {
        public void run() {
          if (hasWriteAction) {
            fireEventsInWriteAction();
          }
          else {
            ApplicationManager.getApplication().runWriteAction(new Runnable() {
              @Override
              public void run() {
                fireEventsInWriteAction();
              }
            });
          }
        }
      };

      if (myDumbModePermission != null) {
        DumbService.allowStartingDumbModeInside(myDumbModePermission, runnable);
      } else {
        runnable.run();
      }
    }
    finally {
      if (token != null) {
        token.finish();
      }
      // Always release waiters, even if firing failed.
      mySemaphore.up();
    }
  }

  protected void fireEventsInWriteAction() {
    final VirtualFileManagerEx manager = (VirtualFileManagerEx)VirtualFileManager.getInstance();

    manager.fireBeforeRefreshStart(myIsAsync);
    try {
      // Processing events may enqueue more work (and scan() may produce more
      // events), so loop until both the queue and the event list drain.
      while (!myWorkQueue.isEmpty() || !myEvents.isEmpty()) {
        PersistentFS.getInstance().processEvents(mergeEventsAndReset());
        scan();
      }
    }
    finally {
      try {
        manager.fireAfterRefreshFinish(myIsAsync);
      }
      finally {
        if (myFinishRunnable != null) {
          myFinishRunnable.run();
        }
      }
    }
  }

  /** Blocks until fireEvents() has completed for this session. */
  public void waitFor() {
    mySemaphore.waitFor();
  }

  /** De-duplicates accumulated events (order-preserving) and clears the list. */
  private List<VFileEvent> mergeEventsAndReset() {
    Set<VFileEvent> mergedEvents = new LinkedHashSet<VFileEvent>(myEvents);
    List<VFileEvent> events = new ArrayList<VFileEvent>(mergedEvents);
    myEvents = new ArrayList<VFileEvent>();
    return events;
  }

  @NotNull
  public ModalityState getModalityState() {
    return myModalityState;
  }

  @Override
  public String toString() {
    return myWorkQueue.size() <= 1 ? "" : myWorkQueue.size() + " roots in queue.";
  }
}
package ca.uhn.fhir.tests.integration.karaf.dstu21; import java.io.IOException; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.UUID; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.parser.DataFormatException; import ca.uhn.fhir.parser.IParser; import ca.uhn.fhir.parser.LenientErrorHandler; import ca.uhn.fhir.parser.StrictErrorHandler; import com.google.common.collect.Sets; import org.hamcrest.Matcher; import org.hamcrest.Matchers; import org.hl7.fhir.dstu2016may.model.Conformance; import org.hl7.fhir.dstu2016may.model.PrimitiveType; import org.hl7.fhir.instance.model.api.IIdType; import org.junit.Assert; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.junit.PaxExam; import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy; import org.ops4j.pax.exam.spi.reactors.PerClass; import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.HAPI_FHIR_DSTU2_1; import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.KARAF; import static ca.uhn.fhir.tests.integration.karaf.PaxExamOptions.WRAP; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.not; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.hamcrest.MatcherAssert.assertThat; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.ops4j.pax.exam.CoreOptions.mavenBundle; import static org.ops4j.pax.exam.CoreOptions.options; import static org.ops4j.pax.exam.CoreOptions.when; import static 
org.ops4j.pax.exam.karaf.options.KarafDistributionOption.debugConfiguration;

/**
 * Useful docs about this test: https://ops4j1.jira.com/wiki/display/paxexam/FAQ
 */
// NOTE(review): JUnit 4's @RunWith is combined with org.junit.jupiter.api.Test
// imports above, and @RunWith itself is not among the visible imports — confirm
// which JUnit version the Pax Exam runner actually drives here.
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerClass.class)
public class JsonParserDstu2_1Test {

   private final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(JsonParserDstu2_1Test.class);
   // Shared context; tests that mutate parser options recreate it (finally blocks
   // or inline) so later tests see default behavior.
   private FhirContext ourCtx = FhirContext.forDstu2_1();

   // Pax Exam container configuration: Karaf distro, wrap protocol, HAPI FHIR
   // DSTU2.1 feature, plus the hamcrest bundle; remote debug is disabled (when(false)).
   @Configuration
   public Option[] config() {
      return options(
         KARAF.option(),
         WRAP.option(),
         HAPI_FHIR_DSTU2_1.option(),
         mavenBundle().groupId("org.apache.servicemix.bundles").artifactId("org.apache.servicemix.bundles.hamcrest").versionAsInProject(),
         when(false)
            .useOptions(
               debugConfiguration("5005", true))
      );
   }

   // Disabling fullUrl-based id override on the CONTEXT keeps the resource's own id.
   @Test
   public void testOverrideResourceIdWithBundleEntryFullUrlDisabled_ConfiguredOnFhirContext() {
      try {
         String tmp = "{\"resourceType\":\"Bundle\",\"entry\":[{\"fullUrl\":\"http://lalaland.org/patient/pat1\",\"resource\":{\"resourceType\":\"Patient\",\"id\":\"patxuzos\"}}]}";
         ourCtx.getParserOptions().setOverrideResourceIdWithBundleEntryFullUrl(false);
         org.hl7.fhir.dstu2016may.model.Bundle bundle = (org.hl7.fhir.dstu2016may.model.Bundle) ourCtx.newJsonParser().parseResource(tmp);
         assertEquals(1, bundle.getEntry().size());
         {
            org.hl7.fhir.dstu2016may.model.Patient o1 = (org.hl7.fhir.dstu2016may.model.Patient) bundle.getEntry().get(0).getResource();
            IIdType o1Id = o1.getIdElement();
            assertFalse(o1Id.hasBaseUrl());
            assertEquals("Patient", o1Id.getResourceType());
            assertEquals("patxuzos", o1Id.getIdPart());
            assertFalse(o1Id.hasVersionIdPart());
         }
      } finally {
         // ensure we cleanup ourCtx so other tests continue to work
         ourCtx = FhirContext.forDstu2_1();
      }
   }

   // Same as above, but the option is set on the PARSER instance instead.
   @Test
   public void testOverrideResourceIdWithBundleEntryFullUrlDisabled_ConfiguredOnParser() {
      try {
         String tmp = "{\"resourceType\":\"Bundle\",\"entry\":[{\"fullUrl\":\"http://lalaland.org/patient/pat1\",\"resource\":{\"resourceType\":\"Patient\",\"id\":\"patxuzos\"}}]}";
         org.hl7.fhir.dstu2016may.model.Bundle bundle = (org.hl7.fhir.dstu2016may.model.Bundle) ourCtx.newJsonParser().setOverrideResourceIdWithBundleEntryFullUrl(false).parseResource(tmp);
         assertEquals(1, bundle.getEntry().size());
         {
            org.hl7.fhir.dstu2016may.model.Patient o1 = (org.hl7.fhir.dstu2016may.model.Patient) bundle.getEntry().get(0).getResource();
            IIdType o1Id = o1.getIdElement();
            assertFalse(o1Id.hasBaseUrl());
            assertEquals("Patient", o1Id.getResourceType());
            assertEquals("patxuzos", o1Id.getIdPart());
            assertFalse(o1Id.hasVersionIdPart());
         }
      } finally {
         // ensure we cleanup ourCtx so other tests continue to work
         ourCtx = FhirContext.forDstu2_1();
      }
   }

   /**
    * #480
    */
   @Test
   public void testEncodeEmptyValue() {
      org.hl7.fhir.dstu2016may.model.QuestionnaireResponse qr = new org.hl7.fhir.dstu2016may.model.QuestionnaireResponse();
      qr.setId("123");
      qr.getAuthoredElement().setValueAsString("");
      qr.addItem().setLinkIdElement(new org.hl7.fhir.dstu2016may.model.StringType());
      qr.getItem().get(0).addItem().setLinkIdElement(new org.hl7.fhir.dstu2016may.model.StringType(""));
      qr.getItem().get(0).addItem().setLinkIdElement(new org.hl7.fhir.dstu2016may.model.StringType("LINKID"));
      String encoded = ourCtx.newJsonParser().encodeResourceToString(qr);
      ourLog.info(encoded);
      // Empty/blank primitives must be dropped, not serialized as "" or null.
      // NOTE(review): stringContainsInOrder is not among the visible static imports — confirm it resolves.
      assertThat(encoded, stringContainsInOrder("123"));
      assertThat(encoded, not(stringContainsInOrder("\"\"")));
      assertThat(encoded, not(stringContainsInOrder("null")));
   }

   // Round-trips a Patient carrying plain, nested, modifier, and primitive-element
   // extensions through the JSON parser. Continues below this chunk boundary.
   @Test
   public void testEncodeAndParseExtensions() throws Exception {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.addIdentifier().setUse(org.hl7.fhir.dstu2016may.model.Identifier.IdentifierUse.OFFICIAL).setSystem("urn:example").setValue("7000135");
      org.hl7.fhir.dstu2016may.model.Extension ext = new org.hl7.fhir.dstu2016may.model.Extension();
      ext.setUrl("http://example.com/extensions#someext");
      ext.setValue(new org.hl7.fhir.dstu2016may.model.DateTimeType("2011-01-02T11:13:15"));
patient.addExtension(ext);

      // Build a nested (parent/child) extension tree, a modifier extension, and
      // extensions on primitive elements (given names), then encode to JSON.
      org.hl7.fhir.dstu2016may.model.Extension parent = new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://example.com#parent");
      patient.addExtension(parent);
      org.hl7.fhir.dstu2016may.model.Extension child1 = new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://example.com#child").setValue(new org.hl7.fhir.dstu2016may.model.StringType("value1"));
      parent.addExtension(child1);
      org.hl7.fhir.dstu2016may.model.Extension child2 = new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://example.com#child").setValue(new org.hl7.fhir.dstu2016may.model.StringType("value2"));
      parent.addExtension(child2);
      org.hl7.fhir.dstu2016may.model.Extension modExt = new org.hl7.fhir.dstu2016may.model.Extension();
      modExt.setUrl("http://example.com/extensions#modext");
      modExt.setValue(new org.hl7.fhir.dstu2016may.model.DateType("1995-01-02"));
      patient.addModifierExtension(modExt);
      org.hl7.fhir.dstu2016may.model.HumanName name = patient.addName();
      name.addFamily("Blah");
      org.hl7.fhir.dstu2016may.model.StringType given = name.addGivenElement();
      given.setValue("Joe");
      org.hl7.fhir.dstu2016may.model.Extension ext2 = new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://examples.com#givenext").setValue(new org.hl7.fhir.dstu2016may.model.StringType("given"));
      given.addExtension(ext2);
      org.hl7.fhir.dstu2016may.model.StringType given2 = name.addGivenElement();
      given2.setValue("Shmoe");
      org.hl7.fhir.dstu2016may.model.Extension given2ext = new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://examples.com#givenext_parent");
      given2.addExtension(given2ext);
      given2ext.addExtension(new org.hl7.fhir.dstu2016may.model.Extension().setUrl("http://examples.com#givenext_child").setValue(new org.hl7.fhir.dstu2016may.model.StringType("CHILD")));
      String output = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient);
      ourLog.info(output);
      String enc = ourCtx.newJsonParser().encodeResourceToString(patient);

      // Verify the wire format directly: extension order, nested children, the
      // modifierExtension array, and the "_given" primitive-extension encoding.
      assertThat(enc, stringContainsInOrder("{\"resourceType\":\"Patient\",", "\"extension\":[{\"url\":\"http://example.com/extensions#someext\",\"valueDateTime\":\"2011-01-02T11:13:15\"}",
         "{\"url\":\"http://example.com#parent\",\"extension\":[{\"url\":\"http://example.com#child\",\"valueString\":\"value1\"},{\"url\":\"http://example.com#child\",\"valueString\":\"value2\"}]}"));
      assertThat(enc, stringContainsInOrder("\"modifierExtension\":[" + "{" + "\"url\":\"http://example.com/extensions#modext\"," + "\"valueDate\":\"1995-01-02\"" + "}" + "],"));
      assertThat(enc, containsString("\"_given\":[" + "{" + "\"extension\":[" + "{" + "\"url\":\"http://examples.com#givenext\"," + "\"valueString\":\"given\"" + "}" + "]" + "}," + "{" + "\"extension\":[" + "{" + "\"url\":\"http://examples.com#givenext_parent\"," + "\"extension\":[" + "{" + "\"url\":\"http://examples.com#givenext_child\"," + "\"valueString\":\"CHILD\"" + "}" + "]" + "}" + "]" + "}"));

      /*
       * Now parse this back
       */
      org.hl7.fhir.dstu2016may.model.Patient parsed = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Patient.class, enc);
      ext = parsed.getExtension().get(0);
      assertEquals("http://example.com/extensions#someext", ext.getUrl());
      assertEquals("2011-01-02T11:13:15", ((org.hl7.fhir.dstu2016may.model.DateTimeType) ext.getValue()).getValueAsString());
      // FIX: was "patient.getExtension().get(1)" — asserting against the object we
      // just built is vacuous; the round-trip checks must read from the re-parsed
      // resource, like every other assertion in this section does.
      parent = parsed.getExtension().get(1);
      assertEquals("http://example.com#parent", parent.getUrl());
      assertNull(parent.getValue());
      child1 = parent.getExtension().get(0);
      assertEquals("http://example.com#child", child1.getUrl());
      assertEquals("value1", ((org.hl7.fhir.dstu2016may.model.StringType) child1.getValue()).getValueAsString());
      child2 = parent.getExtension().get(1);
      assertEquals("http://example.com#child", child2.getUrl());
      assertEquals("value2", ((org.hl7.fhir.dstu2016may.model.StringType) child2.getValue()).getValueAsString());
      modExt = parsed.getModifierExtension().get(0);
      assertEquals("http://example.com/extensions#modext", modExt.getUrl());
assertEquals("1995-01-02", ((org.hl7.fhir.dstu2016may.model.DateType) modExt.getValue()).getValueAsString());

      // Primitive-element extensions survive the round trip too.
      name = parsed.getName().get(0);
      ext2 = name.getGiven().get(0).getExtension().get(0);
      assertEquals("http://examples.com#givenext", ext2.getUrl());
      assertEquals("given", ((org.hl7.fhir.dstu2016may.model.StringType) ext2.getValue()).getValueAsString());
      given2ext = name.getGiven().get(1).getExtension().get(0);
      assertEquals("http://examples.com#givenext_parent", given2ext.getUrl());
      assertNull(given2ext.getValue());
      org.hl7.fhir.dstu2016may.model.Extension given2ext2 = given2ext.getExtension().get(0);
      assertEquals("http://examples.com#givenext_child", given2ext2.getUrl());
      assertEquals("CHILD", ((org.hl7.fhir.dstu2016may.model.StringType) given2ext2.getValue()).getValue());
   }

   // Meta.profile, Meta.tag and Meta.security must encode under "meta" and parse
   // back with system/code/display intact.
   @Test
   public void testEncodeAndParseMetaProfileAndTags() {
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.addName().addFamily("FAMILY");
      p.getMeta().addProfile("http://foo/Profile1");
      p.getMeta().addProfile("http://foo/Profile2");
      p.getMeta().addTag().setSystem("scheme1").setCode("term1").setDisplay("label1");
      p.getMeta().addTag().setSystem("scheme2").setCode("term2").setDisplay("label2");
      p.getMeta().addSecurity().setSystem("sec_scheme1").setCode("sec_term1").setDisplay("sec_label1");
      p.getMeta().addSecurity().setSystem("sec_scheme2").setCode("sec_term2").setDisplay("sec_label2");
      String enc = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      //@formatter:off
      assertThat(enc, stringContainsInOrder("\"meta\": {", "\"profile\": [", "\"http://foo/Profile1\",", "\"http://foo/Profile2\"", "],", "\"security\": [", "{", "\"system\": \"sec_scheme1\",", "\"code\": \"sec_term1\",", "\"display\": \"sec_label1\"", "},", "{", "\"system\": \"sec_scheme2\",", "\"code\": \"sec_term2\",", "\"display\": \"sec_label2\"", "}", "],", "\"tag\": [", "{", "\"system\": \"scheme1\",", "\"code\": \"term1\",", "\"display\": \"label1\"", "},", "{", "\"system\": \"scheme2\",", "\"code\": \"term2\",", "\"display\": \"label2\"", "}", "]", "},"));
      //@formatter:on
      org.hl7.fhir.dstu2016may.model.Patient parsed = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Patient.class, enc);
      List<org.hl7.fhir.dstu2016may.model.UriType> gotLabels = parsed.getMeta().getProfile();
      assertEquals(2, gotLabels.size());
      org.hl7.fhir.dstu2016may.model.UriType label = gotLabels.get(0);
      assertEquals("http://foo/Profile1", label.getValue());
      label = gotLabels.get(1);
      assertEquals("http://foo/Profile2", label.getValue());
      List<org.hl7.fhir.dstu2016may.model.Coding> tagList = parsed.getMeta().getTag();
      assertEquals(2, tagList.size());
      assertEquals("scheme1", tagList.get(0).getSystem());
      assertEquals("term1", tagList.get(0).getCode());
      assertEquals("label1", tagList.get(0).getDisplay());
      assertEquals("scheme2", tagList.get(1).getSystem());
      assertEquals("term2", tagList.get(1).getCode());
      assertEquals("label2", tagList.get(1).getDisplay());
      tagList = parsed.getMeta().getSecurity();
      assertEquals(2, tagList.size());
      assertEquals("sec_scheme1", tagList.get(0).getSystem());
      assertEquals("sec_term1", tagList.get(0).getCode());
      assertEquals("sec_label1", tagList.get(0).getDisplay());
      assertEquals("sec_scheme2", tagList.get(1).getSystem());
      assertEquals("sec_term2", tagList.get(1).getCode());
      assertEquals("sec_label2", tagList.get(1).getDisplay());
   }

   /**
    * See #336
    */
   @Test
   public void testEncodeAndParseNullPrimitiveWithExtensions() {
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setId("patid");
      org.hl7.fhir.dstu2016may.model.HumanName name = p.addName();
      name.addGivenElement().setValue(null).setId("f0").addExtension(new org.hl7.fhir.dstu2016may.model.Extension("http://foo", new org.hl7.fhir.dstu2016may.model.StringType("FOOEXT0")));
      name.addGivenElement().setValue("V1").setId("f1").addExtension((org.hl7.fhir.dstu2016may.model.Extension) new org.hl7.fhir.dstu2016may.model.Extension("http://foo", new org.hl7.fhir.dstu2016may.model.StringType("FOOEXT1")).setId("ext1id"));
      name.addGivenElement(); // this one shouldn't get encoded
      name.addGivenElement().setValue(null).addExtension(new org.hl7.fhir.dstu2016may.model.Extension("http://foo", new org.hl7.fhir.dstu2016may.model.StringType("FOOEXT3")));
      name.setId("nameid");
      String output = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(output);
      output = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(p);
      // Null-valued givens with extensions appear as null placeholders in "given"
      // plus matching entries in "_given"; the fully empty element is dropped.
      String expected = "{\"resourceType\":\"Patient\",\"id\":\"patid\",\"name\":[{\"id\":\"nameid\",\"given\":[null,\"V1\",null],\"_given\":[{\"id\":\"f0\",\"extension\":[{\"url\":\"http://foo\",\"valueString\":\"FOOEXT0\"}]},{\"id\":\"f1\",\"extension\":[{\"id\":\"ext1id\",\"url\":\"http://foo\",\"valueString\":\"FOOEXT1\"}]},{\"extension\":[{\"url\":\"http://foo\",\"valueString\":\"FOOEXT3\"}]}]}]}";
      ourLog.info("Exp: {}", expected);
      ourLog.info("Act: {}", output);
      assertEquals(expected, output);
      p = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Patient.class, output);
      assertEquals("patid", p.getIdElement().getIdPart());
      name = p.getName().get(0);
      assertEquals("nameid", name.getId());
      assertEquals(3, name.getGiven().size());
      assertEquals(null, name.getGiven().get(0).getValue());
      assertEquals("V1", name.getGiven().get(1).getValue());
      assertEquals(null, name.getGiven().get(2).getValue());
      assertEquals("f0", name.getGiven().get(0).getId());
      assertEquals("f1", name.getGiven().get(1).getId());
      assertEquals(null, name.getGiven().get(2).getId());
      assertEquals(1, name.getGiven().get(0).getExtension().size());
      assertEquals("http://foo", name.getGiven().get(0).getExtension().get(0).getUrl());
      assertEquals("FOOEXT0", ((org.hl7.fhir.dstu2016may.model.StringType) name.getGiven().get(0).getExtension().get(0).getValue()).getValue());
      assertEquals(null, name.getGiven().get(0).getExtension().get(0).getId());
      assertEquals(1, name.getGiven().get(1).getExtension().size());
      assertEquals("http://foo", name.getGiven().get(1).getExtension().get(0).getUrl());
      assertEquals("FOOEXT1", ((org.hl7.fhir.dstu2016may.model.StringType) name.getGiven().get(1).getExtension().get(0).getValue()).getValue());
      assertEquals("ext1id", name.getGiven().get(1).getExtension().get(0).getId());
      assertEquals(1, name.getGiven().get(2).getExtension().size());
      assertEquals("http://foo", name.getGiven().get(2).getExtension().get(0).getUrl());
      assertEquals("FOOEXT3", ((org.hl7.fhir.dstu2016may.model.StringType) name.getGiven().get(2).getExtension().get(0).getValue()).getValue());
      assertEquals(null, name.getGiven().get(2).getExtension().get(0).getId());
   }

   // A bundle entry with an empty Patient must not emit a "text" element in
   // either JSON or XML encoding.
   @Test
   public void testEncodeBundleNewBundleNoText() {
      org.hl7.fhir.dstu2016may.model.Bundle b = new org.hl7.fhir.dstu2016may.model.Bundle();
      org.hl7.fhir.dstu2016may.model.Bundle.BundleEntryComponent e = b.addEntry();
      e.setResource(new org.hl7.fhir.dstu2016may.model.Patient());
      String val = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(b);
      ourLog.info(val);
      assertThat(val, not(containsString("text")));
      val = ourCtx.newXmlParser().setPrettyPrint(false).encodeResourceToString(b);
      ourLog.info(val);
      assertThat(val, not(containsString("text")));
   }

   /**
    * See #326
    */
   @Test
   public void testEncodeContainedResource() {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.getBirthDateElement().setValueAsString("2016-04-05");
      patient.addExtension().setUrl("test").setValue(new org.hl7.fhir.dstu2016may.model.Reference(new org.hl7.fhir.dstu2016may.model.Condition()));
      String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient);
      ourLog.info(encoded);
      //@formatter:off
      assertThat(encoded, stringContainsInOrder(
         "{",
         "\"resourceType\": \"Patient\",",
         "\"contained\": [",
         "{",
         "\"resourceType\": \"Condition\",",
"\"id\": \"1\"",
         "}",
         "],",
         "\"extension\": [",
         "{",
         "\"url\": \"test\",",
         "\"valueReference\": {",
         "\"reference\": \"#1\"",
         "}",
         "}",
         "],",
         "\"birthDate\": \"2016-04-05\"",
         "}"
      ));
      //@formatter:on
   }

   // A urn:uuid id must not be serialized as the resource id.
   @Test
   public void testEncodeDoesntIncludeUuidId() {
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setId(new org.hl7.fhir.dstu2016may.model.IdType("urn:uuid:42795ed8-041f-4ebf-b6f4-78ef6f64c2f2"));
      p.addIdentifier().setSystem("ACME");
      String actual = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(p);
      assertThat(actual, not(containsString("78ef6f64c2f2")));
   }

   @Test
   public void testEncodeEmptyBinary() {
      String output = ourCtx.newJsonParser().encodeResourceToString(new org.hl7.fhir.dstu2016may.model.Binary());
      assertEquals("{\"resourceType\":\"Binary\"}", output);
   }

   /**
    * #158
    */
   @Test
   public void testEncodeEmptyTag() {
      // Tags with no system/code (only display or nothing) are considered empty
      // and must not produce a "tag" element.
      ArrayList<org.hl7.fhir.dstu2016may.model.Coding> tagList = new ArrayList<>();
      tagList.add(new org.hl7.fhir.dstu2016may.model.Coding());
      tagList.add(new org.hl7.fhir.dstu2016may.model.Coding().setDisplay("Label"));
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.getMeta().getTag().addAll(tagList);
      String encoded = ourCtx.newJsonParser().encodeResourceToString(p);
      assertThat(encoded, not(containsString("tag")));
   }

   /**
    * #158
    */
   @Test
   public void testEncodeEmptyTag2() {
      // A tag with system+code is kept; the display-only one is still dropped.
      ArrayList<org.hl7.fhir.dstu2016may.model.Coding> tagList = new ArrayList<>();
      tagList.add(new org.hl7.fhir.dstu2016may.model.Coding().setSystem("scheme").setCode("code"));
      tagList.add(new org.hl7.fhir.dstu2016may.model.Coding().setDisplay("Label"));
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.getMeta().getTag().addAll(tagList);
      String encoded = ourCtx.newJsonParser().encodeResourceToString(p);
      assertThat(encoded, containsString("tag"));
      assertThat(encoded, containsString("scheme"));
      assertThat(encoded, not(containsString("Label")));
   }

   // Custom contact component (project-declared PatientWithExtendedContactDstu3)
   // must encode its extension inline and round-trip through a fresh context.
   @Test
   public void testEncodeExtendedInfrastructureComponent() {
      IParser parser = ourCtx.newJsonParser();
      PatientWithExtendedContactDstu3 patient = new PatientWithExtendedContactDstu3();
      patient.setId("123");
      PatientWithExtendedContactDstu3.CustomContactComponent customContactComponent = new PatientWithExtendedContactDstu3.CustomContactComponent();
      customContactComponent.getEyeColour().setValue("EYE");
      customContactComponent.getName().addFamily("FAMILY");
      patient.getCustomContact().add(customContactComponent);
      String val = parser.encodeResourceToString(patient);
      ourLog.info(val);
      assertEquals(
         "{\"resourceType\":\"Patient\",\"id\":\"123\",\"contact\":[{\"extension\":[{\"url\":\"http://foo.com/contact-eyecolour\",\"valueIdentifier\":{\"value\":\"EYE\"}}],\"name\":{\"family\":[\"FAMILY\"]}}]}",
         val);
      FhirContext newCtx = FhirContext.forDstu2_1();
      PatientWithExtendedContactDstu3 actual = newCtx.newJsonParser().parseResource(PatientWithExtendedContactDstu3.class, val);
      assertEquals("EYE", actual.getCustomContact().get(0).getEyeColour().getValue());
   }

   // Extensions on a primitive element encode under the underscore-prefixed key,
   // with and without an accompanying value.
   @Test
   public void testEncodeExtensionInPrimitiveElement() {
      Conformance c = new Conformance();
      c.getAcceptUnknownElement().addExtension().setUrl("http://foo").setValue(new org.hl7.fhir.dstu2016may.model.StringType("AAA"));
      String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(c);
      ourLog.info(encoded);
      encoded = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(c);
      ourLog.info(encoded);
      assertEquals(encoded, "{\"resourceType\":\"Conformance\",\"_acceptUnknown\":{\"extension\":[{\"url\":\"http://foo\",\"valueString\":\"AAA\"}]}}");
      // Now with a value
      ourLog.info("---------------");
      c = new Conformance();
      c.getAcceptUnknownElement().setValue(Conformance.UnknownContentCode.ELEMENTS);
      c.getAcceptUnknownElement().addExtension().setUrl("http://foo").setValue(new org.hl7.fhir.dstu2016may.model.StringType("AAA"));
      encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(c);
      ourLog.info(encoded);
      encoded = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(c);
      ourLog.info(encoded);
      assertEquals(encoded, "{\"resourceType\":\"Conformance\",\"acceptUnknown\":\"elements\",\"_acceptUnknown\":{\"extension\":[{\"url\":\"http://foo\",\"valueString\":\"AAA\"}]}}");
   }

   // An undeclared non-modifier extension must be emitted exactly once and parse back.
   @Test
   public void testEncodeExtensionUndeclaredNonModifier() {
      org.hl7.fhir.dstu2016may.model.Observation obs = new org.hl7.fhir.dstu2016may.model.Observation();
      obs.setId("1");
      obs.getMeta().addProfile("http://profile");
      org.hl7.fhir.dstu2016may.model.Extension ext = obs.addExtension();
      ext.setUrl("http://exturl").setValue(new org.hl7.fhir.dstu2016may.model.StringType("ext_url_value"));
      obs.getCode().setText("CODE");
      IParser parser = ourCtx.newJsonParser();
      String output = parser.setPrettyPrint(true).encodeResourceToString(obs);
      ourLog.info(output);
      //@formatter:off
      assertThat(output, stringContainsInOrder(
         "\"id\": \"1\"",
         "\"meta\"",
         "\"extension\"",
         "\"url\": \"http://exturl\"",
         "\"valueString\": \"ext_url_value\"",
         "\"code\":"
      ));
      assertThat(output, not(stringContainsInOrder(
         "\"url\": \"http://exturl\"",
         ",",
         "\"url\": \"http://exturl\""
      )));
      //@formatter:on
      obs = parser.parseResource(org.hl7.fhir.dstu2016may.model.Observation.class, output);
      assertEquals(1, obs.getExtension().size());
      assertEquals("http://exturl", obs.getExtension().get(0).getUrl());
      assertEquals("ext_url_value", ((org.hl7.fhir.dstu2016may.model.StringType) obs.getExtension().get(0).getValue()).getValue());
   }

   // Same as above, but with a child extension nested under the undeclared one.
   @Test
   public void testEncodeExtensionUndeclaredNonModifierWithChildExtension() {
      org.hl7.fhir.dstu2016may.model.Observation obs = new org.hl7.fhir.dstu2016may.model.Observation();
      obs.setId("1");
      obs.getMeta().addProfile("http://profile");
      org.hl7.fhir.dstu2016may.model.Extension ext = obs.addExtension();
      ext.setUrl("http://exturl");
      org.hl7.fhir.dstu2016may.model.Extension subExt = ext.addExtension();
      subExt.setUrl("http://subext").setValue(new org.hl7.fhir.dstu2016may.model.StringType("sub_ext_value"));
      obs.getCode().setText("CODE");
      IParser parser = ourCtx.newJsonParser();
      String output = parser.setPrettyPrint(true).encodeResourceToString(obs);
      ourLog.info(output);
      //@formatter:off
      assertThat(output, stringContainsInOrder(
         "\"id\": \"1\"",
         "\"meta\"",
         "\"extension\"",
         "\"url\": \"http://exturl\"",
         "\"extension\"",
         "\"url\": \"http://subext\"",
         "\"valueString\": \"sub_ext_value\"",
         "\"code\":"
      ));
      assertThat(output, not(stringContainsInOrder(
         "\"url\": \"http://exturl\"",
         ",",
         "\"url\": \"http://exturl\""
      )));
      //@formatter:on
      obs = parser.parseResource(org.hl7.fhir.dstu2016may.model.Observation.class, output);
      assertEquals(1, obs.getExtension().size());
      assertEquals("http://exturl", obs.getExtension().get(0).getUrl());
      assertEquals(1, obs.getExtension().get(0).getExtension().size());
      assertEquals("http://subext", obs.getExtension().get(0).getExtension().get(0).getUrl());
      assertEquals("sub_ext_value", ((org.hl7.fhir.dstu2016may.model.StringType) obs.getExtension().get(0).getExtension().get(0).getValue()).getValue());
   }

   // A path listed in setDontStripVersionsFromReferencesAtPaths keeps its
   // _history component when it matches the reference's location.
   @Test
   public void testEncodeHistoryEncodeVersionsAtPath1() {
      ourCtx = FhirContext.forDstu2_1();
      assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      parser.setDontStripVersionsFromReferencesAtPaths("Patient.managingOrganization");
      String enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
   }

   // A non-matching path does NOT prevent version stripping. Continues below.
   @Test
   public void testEncodeHistoryEncodeVersionsAtPath2() {
      ourCtx = FhirContext.forDstu2_1();
      assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
assertTrue(ourCtx.getParserOptions().isStripVersionsFromReferences());
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      // Path belongs to AuditEvent, not Patient, so the version is still stripped.
      parser.setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.reference");
      String enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
   }

   // Matching path keeps the version; clearing the path list (empty list, null
   // varargs, null list) restores stripping.
   @Test
   public void testEncodeHistoryEncodeVersionsAtPath3() {
      ourCtx = FhirContext.forDstu2_1();
      assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
      org.hl7.fhir.dstu2016may.model.AuditEvent auditEvent = new org.hl7.fhir.dstu2016may.model.AuditEvent();
      auditEvent.addEntity().setReference(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      parser.setDontStripVersionsFromReferencesAtPaths("AuditEvent.entity.reference");
      String enc = parser.setPrettyPrint(true).encodeResourceToString(auditEvent);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
      parser.setDontStripVersionsFromReferencesAtPaths(new ArrayList<String>());
      enc = parser.setPrettyPrint(true).encodeResourceToString(auditEvent);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
      parser.setDontStripVersionsFromReferencesAtPaths((String[]) null);
      enc = parser.setPrettyPrint(true).encodeResourceToString(auditEvent);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
      parser.setDontStripVersionsFromReferencesAtPaths((List<String>) null);
      enc = parser.setPrettyPrint(true).encodeResourceToString(auditEvent);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
   }

   // Same behavior when the dont-strip paths are configured on the context's
   // ParserOptions (String varargs, List and Set overloads).
   @Test
   public void testEncodeHistoryEncodeVersionsAtPathUsingOptions() {
      ourCtx = FhirContext.forDstu2_1();
      assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
      assertTrue(ourCtx.getParserOptions().isStripVersionsFromReferences());
      assertThat(ourCtx.getParserOptions().getDontStripVersionsFromReferencesAtPaths(), empty());
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      ourCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths("Patient.managingOrganization");
      String enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
      ourCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(Arrays.asList("Patient.managingOrganization"));
      enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
      ourCtx.getParserOptions().setDontStripVersionsFromReferencesAtPaths(new HashSet<String>(Arrays.asList("Patient.managingOrganization")));
      enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
   }

   // Parser-level setStripVersionsFromReferences(false) keeps the _history part.
   @Test
   public void testEncodeHistoryStripVersionsFromReferences() {
      ourCtx = FhirContext.forDstu2_1();
      assertNull(ourCtx.newJsonParser().getStripVersionsFromReferences());
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      String enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
      parser.setStripVersionsFromReferences(false);
      enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
      ourCtx = FhirContext.forDstu2_1();
   }

   // Context-level option is honored, and a parser-level setting overrides it.
   @Test
   public void testEncodeHistoryStripVersionsFromReferencesFromContext() {
      ourCtx = FhirContext.forDstu2_1();
      assertTrue(ourCtx.getParserOptions().isStripVersionsFromReferences());
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.setManagingOrganization(new org.hl7.fhir.dstu2016may.model.Reference("http://foo.com/Organization/2/_history/1"));
      IParser parser = ourCtx.newJsonParser();
      String enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
      ourCtx.getParserOptions().setStripVersionsFromReferences(false);
      enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2/_history/1\""));
      parser.setStripVersionsFromReferences(true);
      enc = parser.setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(enc);
      assertThat(enc, containsString("\"reference\": \"http://foo.com/Organization/2\""));
      ourCtx = FhirContext.forDstu2_1();
   }

   @Test
   public void testEncodeNarrativeShouldIncludeNamespace() {
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.getText().setDivAsString("<div>VALUE</div>");
      String output = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(output);
      assertThat(output, containsString("\"div\": \"<div xmlns=\\\"http://www.w3.org/1999/xhtml\\\">VALUE</div>\""));
   }

   // An XML declaration in the narrative is stripped; xhtml namespace still added.
   @Test
   public void testEncodeNarrativeShouldIncludeNamespaceWithProcessingInstruction() {
      org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient();
      p.getText().setDivAsString("<?xml version=\"1.0\" encoding=\"UTF-8\"?><div>VALUE</div>");
      String output = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(p);
      ourLog.info(output);
      assertThat(output, containsString("\"div\": \"<div xmlns=\\\"http://www.w3.org/1999/xhtml\\\">VALUE</div>\""));
   }

   // setSuppressNarratives(true) drops text but keeps data, tagging the resource
   // as SUBSETTED.
   @Test
   public void testEncodeNarrativeSuppressed() throws Exception {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.setId("Patient/1/_history/1");
      patient.getText().setDivAsString("<div>THE DIV</div>");
      patient.addName().addFamily("FAMILY");
      patient.getMaritalStatus().addCoding().setCode("D");
      String encoded = ourCtx.newJsonParser().setPrettyPrint(true).setSuppressNarratives(true).encodeResourceToString(patient);
      ourLog.info(encoded);
      assertThat(encoded, containsString("Patient"));
      assertThat(encoded, stringContainsInOrder(ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_SYSTEM_DSTU3, ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_CODE));
      assertThat(encoded, not(containsString("text")));
      assertThat(encoded, not(containsString("THE DIV")));
      assertThat(encoded, containsString("family"));
      assertThat(encoded, containsString("maritalStatus"));
   }

   @Test
   public void testEncodeParametersWithId() {
      org.hl7.fhir.dstu2016may.model.Parameters reqParms = new org.hl7.fhir.dstu2016may.model.Parameters();
      org.hl7.fhir.dstu2016may.model.IdType patient = new org.hl7.fhir.dstu2016may.model.IdType(1);
      reqParms.addParameter().setName("patient").setValue(patient);
      String enc = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(reqParms);
      ourLog.info(enc);
      assertThat(enc, containsString("\"valueId\": \"1\""));
   }

   // Summary mode keeps summary elements (name), drops narrative/photo/maritalStatus,
   // and adds the SUBSETTED tag.
   @Test
   public void testEncodeSummary() {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.setId("Patient/1/_history/1");
      patient.getText().setDivAsString("<div>THE DIV</div>");
      patient.addName().addFamily("FAMILY");
      patient.addPhoto().setTitle("green");
      patient.getMaritalStatus().addCoding().setCode("D");
      ourLog.info(ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(patient));
      String encoded = ourCtx.newJsonParser().setPrettyPrint(true).setSummaryMode(true).encodeResourceToString(patient);
      ourLog.info(encoded);
      assertThat(encoded, containsString("Patient"));
      assertThat(encoded, stringContainsInOrder("\"tag\"", "\"system\": \"" + ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_SYSTEM_DSTU3 + "\",", "\"code\": \"" + ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_CODE + "\""));
      assertThat(encoded, not(containsString("THE DIV")));
      assertThat(encoded, containsString("family"));
      assertThat(encoded, not(containsString("maritalStatus")));
   }

   // Summary mode preserves pre-existing tags and appends the SUBSETTED tag.
   @Test
   public void testEncodeSummary2() {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.setId("Patient/1/_history/1");
      patient.getText().setDivAsString("<div>THE DIV</div>");
      patient.addName().addFamily("FAMILY");
      patient.getMaritalStatus().addCoding().setCode("D");
      patient.getMeta().addTag().setSystem("foo").setCode("bar");
      String encoded = ourCtx.newJsonParser().setPrettyPrint(true).setSummaryMode(true).encodeResourceToString(patient);
      ourLog.info(encoded);
      assertThat(encoded, containsString("Patient"));
      assertThat(encoded, stringContainsInOrder("\"tag\"", "\"system\": \"foo\",", "\"code\": \"bar\"", "\"system\": \"" + ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_SYSTEM_DSTU3 + "\"", "\"code\": \"" + ca.uhn.fhir.rest.api.Constants.TAG_SUBSETTED_CODE + "\""));
      assertThat(encoded, not(containsString("THE DIV")));
      assertThat(encoded, containsString("family"));
      assertThat(encoded, not(containsString("maritalStatus")));
   }

   /**
    * See #205
    */
   @Test
   public void testEncodeTags() {
      org.hl7.fhir.dstu2016may.model.Patient pt = new org.hl7.fhir.dstu2016may.model.Patient();
      pt.addIdentifier().setSystem("sys").setValue("val");
      pt.getMeta().addTag().setSystem("scheme").setCode("term").setDisplay("display");
      String enc = ourCtx.newJsonParser().encodeResourceToString(pt);
      ourLog.info(enc);
      assertEquals("{\"resourceType\":\"Patient\",\"meta\":{\"tag\":[{\"system\":\"scheme\",\"code\":\"term\",\"display\":\"display\"}]},\"identifier\":[{\"system\":\"sys\",\"value\":\"val\"}]}", enc);
   }

   /**
    * See #241
    */
   @Test
   public void testEncodeThenParseShouldNotAddSpuriousId() throws Exception {
      // The bundle ENTRY id must round-trip without leaking onto the contained resource.
      org.hl7.fhir.dstu2016may.model.Condition condition = new org.hl7.fhir.dstu2016may.model.Condition().setVerificationStatus(org.hl7.fhir.dstu2016may.model.Condition.ConditionVerificationStatus.CONFIRMED);
      org.hl7.fhir.dstu2016may.model.Bundle bundle = new org.hl7.fhir.dstu2016may.model.Bundle();
      org.hl7.fhir.dstu2016may.model.Bundle.BundleEntryComponent entry = new org.hl7.fhir.dstu2016may.model.Bundle.BundleEntryComponent();
      entry.setId("123");
      entry.setResource(condition);
      bundle.getEntry().add(entry);
      IParser parser = ourCtx.newJsonParser();
      String json = parser.encodeResourceToString(bundle);
      ourLog.info(json);
      bundle = (org.hl7.fhir.dstu2016may.model.Bundle) parser.parseResource(json);
      assertEquals("123", bundle.getEntry().get(0).getId());
      condition = (org.hl7.fhir.dstu2016may.model.Condition) bundle.getEntry().get(0).getResource();
      assertEquals(null, condition.getId());
   }

   // Truncated at the chunk boundary; the method continues below this file view.
   @Test
   public void testEncodeWithDontEncodeElements() throws Exception {
      org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient();
      patient.setId("123");
      patient.getMeta().addProfile(("http://profile"));
      patient.addName().addFamily("FAMILY").addGiven("GIVEN");
      patient.addAddress().addLine("LINE1");
      {
         IParser p = ourCtx.newJsonParser();
         p.setDontEncodeElements(Sets.newHashSet("*.meta", "*.id"));
         p.setPrettyPrint(true);
         String out = p.encodeResourceToString(patient);
         ourLog.info(out);
         assertThat(out, containsString("Patient"));
         assertThat(out, containsString("name"));
         assertThat(out,
containsString("address")); assertThat(out, not(containsString("id"))); assertThat(out, not(containsString("meta"))); } { IParser p = ourCtx.newJsonParser(); p.setDontEncodeElements(Sets.newHashSet("Patient.meta", "Patient.id")); p.setPrettyPrint(true); String out = p.encodeResourceToString(patient); ourLog.info(out); assertThat(out, containsString("Patient")); assertThat(out, containsString("name")); assertThat(out, containsString("address")); assertThat(out, not(containsString("id"))); assertThat(out, not(containsString("meta"))); } { IParser p = ourCtx.newJsonParser(); p.setDontEncodeElements(Sets.newHashSet("Patient.name.family")); p.setPrettyPrint(true); String out = p.encodeResourceToString(patient); ourLog.info(out); assertThat(out, containsString("GIVEN")); assertThat(out, not(containsString("FAMILY"))); } { IParser p = ourCtx.newJsonParser(); p.setDontEncodeElements(Sets.newHashSet("*.meta", "*.id")); p.setPrettyPrint(true); String out = p.encodeResourceToString(patient); ourLog.info(out); assertThat(out, containsString("Patient")); assertThat(out, containsString("name")); assertThat(out, containsString("address")); assertThat(out, not(containsString("id"))); assertThat(out, not(containsString("meta"))); } { IParser p = ourCtx.newJsonParser(); p.setDontEncodeElements(Sets.newHashSet("Patient.meta")); p.setEncodeElements(new HashSet<String>(Arrays.asList("Patient.name"))); p.setPrettyPrint(true); String out = p.encodeResourceToString(patient); ourLog.info(out); assertThat(out, containsString("Patient")); assertThat(out, containsString("name")); assertThat(out, containsString("id")); assertThat(out, not(containsString("address"))); } } @Test public void testEncodingNullExtension() { org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient(); org.hl7.fhir.dstu2016may.model.Extension extension = new org.hl7.fhir.dstu2016may.model.Extension("http://foo#bar"); p.addExtension(extension); String str = 
ourCtx.newJsonParser().encodeResourceToString(p); assertEquals("{\"resourceType\":\"Patient\"}", str); extension.setValue(new org.hl7.fhir.dstu2016may.model.StringType()); str = ourCtx.newJsonParser().encodeResourceToString(p); assertEquals("{\"resourceType\":\"Patient\"}", str); extension.setValue(new org.hl7.fhir.dstu2016may.model.StringType("")); str = ourCtx.newJsonParser().encodeResourceToString(p); assertEquals("{\"resourceType\":\"Patient\"}", str); } @Test public void testExponentDoesntGetEncodedAsSuch() { org.hl7.fhir.dstu2016may.model.Observation obs = new org.hl7.fhir.dstu2016may.model.Observation(); obs.setValue(new org.hl7.fhir.dstu2016may.model.Quantity().setValue(new BigDecimal("0.000000000000000100"))); String str = ourCtx.newJsonParser().encodeResourceToString(obs); ourLog.info(str); assertEquals("{\"resourceType\":\"Observation\",\"valueQuantity\":{\"value\":0.000000000000000100}}", str); } @Test public void testExponentParseWorks() { String input = "{\"resourceType\":\"Observation\",\"valueQuantity\":{\"value\":0.0000000000000001}}"; org.hl7.fhir.dstu2016may.model.Observation obs = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Observation.class, input); assertEquals("0.0000000000000001", ((org.hl7.fhir.dstu2016may.model.Quantity) obs.getValue()).getValueElement().getValueAsString()); String str = ourCtx.newJsonParser().encodeResourceToString(obs); ourLog.info(str); assertEquals("{\"resourceType\":\"Observation\",\"valueQuantity\":{\"value\":0.0000000000000001}}", str); } /** * #516 */ @Test(expected = DataFormatException.class) public void testInvalidEnumValue() { String res = "{ \"resourceType\": \"ValueSet\", \"url\": \"http://sample/ValueSet/education-levels\", \"version\": \"1\", \"name\": \"Education Levels\", \"status\": \"draft\", \"compose\": { \"include\": [ { \"filter\": [ { \"property\": \"n\", \"op\": \"n\", \"value\": \"365460000\" } ], \"system\": \"http://snomed.info/sct\" } ], \"exclude\": [ { \"concept\": [ 
{ \"code\": \"224298008\" }, { \"code\": \"365460000\" }, { \"code\": \"473462005\" }, { \"code\": \"424587006\" } ], \"system\": \"http://snomed.info/sct\" } ] }, \"description\": \"A selection of Education Levels\", \"text\": { \"status\": \"generated\", \"div\": \"<div xmlns=\\\"http://www.w3.org/1999/xhtml\\\"><h2>Education Levels</h2><tt>http://csiro.au/ValueSet/education-levels</tt><p>A selection of Education Levels</p></div>\" }, \"experimental\": true, \"date\": \"2016-07-26\" }"; IParser parser = ourCtx.newJsonParser(); parser.setParserErrorHandler(new StrictErrorHandler()); org.hl7.fhir.dstu2016may.model.ValueSet parsed = parser.parseResource(org.hl7.fhir.dstu2016may.model.ValueSet.class, res); fail("DataFormat Invalid attribute exception should be thrown"); } /** * #65 */ @Test public void testJsonPrimitiveWithExtensionEncoding() { org.hl7.fhir.dstu2016may.model.QuestionnaireResponse parsed = new org.hl7.fhir.dstu2016may.model.QuestionnaireResponse(); parsed.addItem().setLinkId("value123"); parsed.getItem().get(0).getLinkIdElement().addExtension(new org.hl7.fhir.dstu2016may.model.Extension("http://123", new org.hl7.fhir.dstu2016may.model.StringType("HELLO"))); String encoded = ourCtx.newJsonParser().setPrettyPrint(false).encodeResourceToString(parsed); ourLog.info(encoded); assertThat(encoded, containsString("{\"linkId\":\"value123\",\"_linkId\":{\"extension\":[{\"url\":\"http://123\",\"valueString\":\"HELLO\"}]}}")); } @Test public void testLinkage() { org.hl7.fhir.dstu2016may.model.Linkage l = new org.hl7.fhir.dstu2016may.model.Linkage(); l.addItem().getResource().setDisplay("FOO"); String out = ourCtx.newXmlParser().encodeResourceToString(l); ourLog.info(out); assertEquals("<Linkage xmlns=\"http://hl7.org/fhir\"><item><resource><display value=\"FOO\"/></resource></item></Linkage>", out); } @Test public void testOmitResourceId() { org.hl7.fhir.dstu2016may.model.Patient p = new org.hl7.fhir.dstu2016may.model.Patient(); p.setId("123"); 
p.addName().addFamily("ABC"); assertThat(ourCtx.newJsonParser().encodeResourceToString(p), stringContainsInOrder("123", "ABC")); assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p), containsString("ABC")); assertThat(ourCtx.newJsonParser().setOmitResourceId(true).encodeResourceToString(p), not(containsString("123"))); } @Test public void testParseAndEncodeBundleWithUuidBase() { //@formatter:off String input = "{\n" + " \"resourceType\":\"Bundle\",\n" + " \"type\":\"document\",\n" + " \"entry\":[\n" + " {\n" + " \"fullUrl\":\"urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57\",\n" + " \"resource\":{\n" + " \"resourceType\":\"Composition\",\n" + " \"id\":\"180f219f-97a8-486d-99d9-ed631fe4fc57\",\n" + " \"meta\":{\n" + " \"lastUpdated\":\"2013-05-28T22:12:21Z\"\n" + " },\n" + " \"text\":{\n" + " \"status\":\"generated\",\n" + " \"div\":\"<div xmlns=\\\"http://www.w3.org/1999/xhtml\\\"><p><b>Generated Narrative with Details</b></p><p><b>id</b>: 180f219f-97a8-486d-99d9-ed631fe4fc57</p><p><b>meta</b>: </p><p><b>date</b>: Feb 1, 2013 12:30:02 PM</p><p><b>type</b>: Discharge Summary from Responsible Clinician <span>(Details : {LOINC code '28655-9' = 'Physician attending Discharge summary)</span></p><p><b>status</b>: final</p><p><b>confidentiality</b>: N</p><p><b>author</b>: <a>Doctor Dave. 
Generated Summary: 23; Adam Careful </a></p><p><b>encounter</b>: <a>http://fhir.healthintersections.com.au/open/Encounter/doc-example</a></p></div>\"\n" + " },\n" + " \"date\":\"2013-02-01T12:30:02Z\",\n" + " \"type\":{\n" + " \"coding\":[\n" + " {\n" + " \"system\":\"http://loinc.org\",\n" + " \"code\":\"28655-9\"\n" + " }\n" + " ],\n" + " \"text\":\"Discharge Summary from Responsible Clinician\"\n" + " },\n" + " \"status\":\"final\",\n" + " \"confidentiality\":\"N\",\n" + " \"subject\":{\n" + " \"reference\":\"http://fhir.healthintersections.com.au/open/Patient/d1\",\n" + " \"display\":\"Eve Everywoman\"\n" + " },\n" + " \"author\":[\n" + " {\n" + " \"reference\":\"Practitioner/example\",\n" + " \"display\":\"Doctor Dave\"\n" + " }\n" + " ],\n" + " \"encounter\":{\n" + " \"reference\":\"http://fhir.healthintersections.com.au/open/Encounter/doc-example\"\n" + " },\n" + " \"section\":[\n" + " {\n" + " \"title\":\"Reason for admission\",\n" + " \"content\":{\n" + " \"reference\":\"urn:uuid:d0dd51d3-3ab2-4c84-b697-a630c3e40e7a\"\n" + " }\n" + " },\n" + " {\n" + " \"title\":\"Medications on Discharge\",\n" + " \"content\":{\n" + " \"reference\":\"urn:uuid:673f8db5-0ffd-4395-9657-6da00420bbc1\"\n" + " }\n" + " },\n" + " {\n" + " \"title\":\"Known allergies\",\n" + " \"content\":{\n" + " \"reference\":\"urn:uuid:68f86194-e6e1-4f65-b64a-5314256f8d7b\"\n" + " }\n" + " }\n" + " ]\n" + " }\n" + " }" + " ]" + "}"; //@formatter:on org.hl7.fhir.dstu2016may.model.Bundle parsed = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Bundle.class, input); String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(parsed); ourLog.info(encoded); assertEquals("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57", parsed.getEntry().get(0).getResource().getIdElement().getValue()); assertEquals(null, parsed.getEntry().get(0).getResource().getIdElement().getBaseUrl()); assertEquals("urn:uuid:180f219f-97a8-486d-99d9-ed631fe4fc57", 
parsed.getEntry().get(0).getResource().getIdElement().getIdPart()); assertThat(encoded, not(containsString("\"id\":\"180f219f-97a8-486d-99d9-ed631fe4fc57\""))); } @Test public void testParseAndEncodeComments() { //@formatter:off String input = "{\n" + " \"resourceType\": \"Patient\",\n" + " \"id\": \"pat1\",\n" + " \"text\": {\n" + " \"status\": \"generated\",\n" + " \"div\": \"<div>\\n \\n <p>Patient Donald DUCK @ Acme Healthcare, Inc. MR = 654321</p>\\n \\n </div>\"\n" + " },\n" + " \"identifier\": [\n" + " {\n" + " \"fhir_comments\":[\"identifier comment 1\",\"identifier comment 2\"],\n" + " \"use\": \"usual\",\n" + " \"_use\": {\n" + " \"fhir_comments\":[\"use comment 1\",\"use comment 2\"]\n" + " },\n" + " \"type\": {\n" + " \"coding\": [\n" + " {\n" + " \"system\": \"http://hl7.org/fhir/v2/0203\",\n" + " \"code\": \"MR\"\n" + " }\n" + " ]\n" + " },\n" + " \"system\": \"urn:oid:0.1.2.3.4.5.6.7\",\n" + " \"value\": \"654321\"\n" + " }\n" + " ],\n" + " \"active\": true" + "}"; //@formatter:off org.hl7.fhir.dstu2016may.model.Patient res = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Patient.class, input); res.getFormatCommentsPre(); assertEquals("Patient/pat1", res.getId()); assertEquals("654321", res.getIdentifier().get(0).getValue()); assertEquals(true, res.getActive()); assertThat(res.getIdentifier().get(0).getFormatCommentsPre(), contains("identifier comment 1", "identifier comment 2")); assertThat(res.getIdentifier().get(0).getUseElement().getFormatCommentsPre(), contains("use comment 1", "use comment 2")); String encoded = ourCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(res); ourLog.info(encoded); //@formatter:off assertThat(encoded, stringContainsInOrder( "\"identifier\": [", "{", "\"fhir_comments\":", "[", "\"identifier comment 1\"", ",", "\"identifier comment 2\"", "]", "\"use\": \"usual\",", "\"_use\": {", "\"fhir_comments\":", "[", "\"use comment 1\"", ",", "\"use comment 2\"", "]", "},", "\"type\"" )); 
//@formatter:off } @Test public void testParseBundleWithBinary() { org.hl7.fhir.dstu2016may.model.Binary patient = new org.hl7.fhir.dstu2016may.model.Binary(); patient.setId(new org.hl7.fhir.dstu2016may.model.IdType("http://base/Binary/11/_history/22")); patient.setContentType("foo"); patient.setContent(new byte[] { 1, 2, 3, 4 }); String val = ourCtx.newJsonParser().encodeResourceToString(patient); String expected = "{\"resourceType\":\"Binary\",\"id\":\"11\",\"meta\":{\"versionId\":\"22\"},\"contentType\":\"foo\",\"content\":\"AQIDBA==\"}"; ourLog.info("Expected: {}", expected); ourLog.info("Actual : {}", val); assertEquals(expected, val); } /** * See #342 */ @Test() public void testParseInvalid() { try { ourCtx.newJsonParser().parseResource("FOO"); fail(); } catch (DataFormatException e) { assertEquals("Failed to parse JSON content, error was: Content does not appear to be FHIR JSON, first non-whitespace character was: 'F' (must be '{')", e.getMessage()); } try { ourCtx.newJsonParser().parseResource("[\"aaa\"]"); fail(); } catch (DataFormatException e) { assertEquals("Failed to parse JSON content, error was: Content does not appear to be FHIR JSON, first non-whitespace character was: '[' (must be '{')", e.getMessage()); } assertEquals(org.hl7.fhir.dstu2016may.model.Bundle.class, ourCtx.newJsonParser().parseResource(" {\"resourceType\" : \"Bundle\"}").getClass()); } /** * See #414 */ @Test public void testParseJsonExtensionWithoutUrl() { //@formatter:off String input = "{\"resourceType\":\"Patient\"," + "\"extension\":[ {\"valueDateTime\":\"2011-01-02T11:13:15\"} ]" + "}"; //@formatter:on IParser parser = ourCtx.newJsonParser(); parser.setParserErrorHandler(new LenientErrorHandler()); org.hl7.fhir.dstu2016may.model.Patient parsed = (org.hl7.fhir.dstu2016may.model.Patient) parser.parseResource(input); assertEquals(1, parsed.getExtension().size()); assertEquals(null, parsed.getExtension().get(0).getUrl()); assertEquals("2011-01-02T11:13:15", 
((PrimitiveType<?>)parsed.getExtension().get(0).getValue()).getValueAsString()); try { parser = ourCtx.newJsonParser(); parser.setParserErrorHandler(new StrictErrorHandler()); parser.parseResource(input); fail(); } catch (DataFormatException e) { assertEquals("Resource is missing required element 'url' in parent element 'extension'", e.getMessage()); } } /** * See #414 */ @Test public void testParseJsonModifierExtensionWithoutUrl() { //@formatter:off String input = "{\"resourceType\":\"Patient\"," + "\"modifierExtension\":[ {\"valueDateTime\":\"2011-01-02T11:13:15\"} ]" + "}"; //@formatter:on IParser parser = ourCtx.newJsonParser(); parser.setParserErrorHandler(new LenientErrorHandler()); org.hl7.fhir.dstu2016may.model.Patient parsed = (org.hl7.fhir.dstu2016may.model.Patient) parser.parseResource(input); assertEquals(1, parsed.getModifierExtension().size()); assertEquals(null, parsed.getModifierExtension().get(0).getUrl()); assertEquals("2011-01-02T11:13:15", ((PrimitiveType<?>)parsed.getModifierExtension().get(0).getValue()).getValueAsString()); try { parser = ourCtx.newJsonParser(); parser.setParserErrorHandler(new StrictErrorHandler()); parser.parseResource(input); fail(); } catch (DataFormatException e) { assertEquals("Resource is missing required element 'url' in parent element 'modifierExtension'", e.getMessage()); } } /** * See #484 */ @Test public void testParseNarrativeWithEmptyDiv() { String input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"<div/>\"}}"; org.hl7.fhir.dstu2016may.model.Basic basic = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Basic.class, input); assertEquals(null, basic.getText().getDivAsString()); input = "{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"<div></div>\"}}"; basic = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Basic.class, input); assertEquals(null, basic.getText().getDivAsString()); input = 
"{\"resourceType\":\"Basic\",\"id\":\"1\",\"text\":{\"status\":\"generated\",\"div\":\"<div> </div>\"}}"; basic = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Basic.class, input); assertEquals("<div xmlns=\"http://www.w3.org/1999/xhtml\"> </div>", basic.getText().getDivAsString()); } /** * See #163 */ @Test public void testParseResourceType() { IParser jsonParser = ourCtx.newJsonParser().setPrettyPrint(true); // Patient org.hl7.fhir.dstu2016may.model.Patient patient = new org.hl7.fhir.dstu2016may.model.Patient(); String patientId = UUID.randomUUID().toString(); patient.setId(new org.hl7.fhir.dstu2016may.model.IdType("Patient", patientId)); patient.addName().addGiven("John").addFamily("Smith"); patient.setGender(org.hl7.fhir.dstu2016may.model.Enumerations.AdministrativeGender.MALE); patient.setBirthDateElement(new org.hl7.fhir.dstu2016may.model.DateType("1987-04-16")); // Bundle org.hl7.fhir.dstu2016may.model.Bundle bundle = new org.hl7.fhir.dstu2016may.model.Bundle(); bundle.setType(org.hl7.fhir.dstu2016may.model.Bundle.BundleType.COLLECTION); bundle.addEntry().setResource(patient); String bundleText = jsonParser.encodeResourceToString(bundle); ourLog.info(bundleText); org.hl7.fhir.dstu2016may.model.Bundle reincarnatedBundle = jsonParser.parseResource(org.hl7.fhir.dstu2016may.model.Bundle.class, bundleText); org.hl7.fhir.dstu2016may.model.Patient reincarnatedPatient = (org.hl7.fhir.dstu2016may.model.Patient) reincarnatedBundle.getEntry().get(0).getResource(); assertEquals("Patient", patient.getIdElement().getResourceType()); assertEquals("Patient", reincarnatedPatient.getIdElement().getResourceType()); } /** * See #207 */ @Test public void testParseResourceWithInvalidType() { String input = "{" + "\"resourceType\":\"Patient\"," + "\"contained\":[" + " {" + " \"rezType\":\"Organization\"" + " }" + " ]" + "}"; IParser jsonParser = ourCtx.newJsonParser().setPrettyPrint(true); try { jsonParser.parseResource(input); fail(); } catch 
(DataFormatException e) { assertEquals("Missing required element 'resourceType' from JSON resource object, unable to parse", e.getMessage()); } } /** * See #344 */ @Test public void testParserIsCaseSensitive() { org.hl7.fhir.dstu2016may.model.Observation obs = new org.hl7.fhir.dstu2016may.model.Observation(); org.hl7.fhir.dstu2016may.model.SampledData data = new org.hl7.fhir.dstu2016may.model.SampledData(); data.setData("1 2 3"); data.setOrigin((org.hl7.fhir.dstu2016may.model.SimpleQuantity) new org.hl7.fhir.dstu2016may.model.SimpleQuantity().setValue(0L)); data.setPeriod(1000L); obs.setValue(data); IParser p = ourCtx.newJsonParser().setPrettyPrint(true).setParserErrorHandler(new StrictErrorHandler()); String encoded = p.encodeResourceToString(obs); ourLog.info(encoded); p.parseResource(encoded); try { p.parseResource(encoded.replace("Observation", "observation")); fail(); } catch (DataFormatException e) { assertEquals("Unknown resource type 'observation': Resource names are case sensitive, found similar name: 'Observation'", e.getMessage()); } try { p.parseResource(encoded.replace("valueSampledData", "valueSampleddata")); fail(); } catch (DataFormatException e) { assertEquals("Unknown element 'valueSampleddata' found during parse", e.getMessage()); } } @Test public void testParseWithPrecision() { String input = "{\"resourceType\":\"Observation\",\"valueQuantity\":{\"value\":0.000000000000000100}}"; org.hl7.fhir.dstu2016may.model.Observation obs = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Observation.class, input); org.hl7.fhir.dstu2016may.model.DecimalType valueElement = ((org.hl7.fhir.dstu2016may.model.Quantity) obs.getValue()).getValueElement(); assertEquals("0.000000000000000100", valueElement.getValueAsString()); String str = ourCtx.newJsonParser().encodeResourceToString(obs); ourLog.info(str); assertEquals("{\"resourceType\":\"Observation\",\"valueQuantity\":{\"value\":0.000000000000000100}}", str); } @Test(expected = 
DataFormatException.class) public void testParseWithTrailingContent() throws Exception { //@formatter:off String bundle = "{\n" + " \"resourceType\" : \"Bundle\",\n" + " \"total\" : 1\n" + "}}"; //@formatter:on org.hl7.fhir.dstu2016may.model.Bundle b = ourCtx.newJsonParser().parseResource(org.hl7.fhir.dstu2016may.model.Bundle.class, bundle); } @Test public void testBaseUrlFooResourceCorrectlySerializedInExtensionValueReference() { String refVal = "http://my.org/FooBar"; org.hl7.fhir.dstu2016may.model.Patient fhirPat = new org.hl7.fhir.dstu2016may.model.Patient(); fhirPat.addExtension().setUrl("x1").setValue(new org.hl7.fhir.dstu2016may.model.Reference(refVal)); IParser parser = ourCtx.newJsonParser(); String output = parser.encodeResourceToString(fhirPat); System.out.println("output: " + output); // Deserialize then check that valueReference value is still correct fhirPat = parser.parseResource(org.hl7.fhir.dstu2016may.model.Patient.class, output); List<org.hl7.fhir.dstu2016may.model.Extension> extlst = fhirPat.getExtensionsByUrl("x1"); assertEquals(1, extlst.size()); assertEquals(refVal, ((org.hl7.fhir.dstu2016may.model.Reference) extlst.get(0).getValue()).getReference()); } private Matcher<? super String> stringContainsInOrder(java.lang.String... substrings) { return Matchers.stringContainsInOrder(Arrays.asList(substrings)); } }
package io.jenkins.blueocean.blueocean_bitbucket_pipeline; import com.cloudbees.jenkins.plugins.bitbucket.BitbucketSCMSource; import com.cloudbees.jenkins.plugins.bitbucket.endpoints.BitbucketEndpointConfiguration; import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.model.Item; import hudson.model.User; import io.jenkins.blueocean.blueocean_bitbucket_pipeline.cloud.BitbucketCloudScm; import io.jenkins.blueocean.blueocean_bitbucket_pipeline.model.BbBranch; import io.jenkins.blueocean.blueocean_bitbucket_pipeline.model.BbSaveContentResponse; import io.jenkins.blueocean.blueocean_bitbucket_pipeline.server.BitbucketServerScm; import io.jenkins.blueocean.commons.ErrorMessage; import io.jenkins.blueocean.commons.ServiceException; import io.jenkins.blueocean.rest.factory.organization.OrganizationFactory; import io.jenkins.blueocean.rest.impl.pipeline.scm.AbstractScmContentProvider; import io.jenkins.blueocean.rest.impl.pipeline.scm.GitContent; import io.jenkins.blueocean.rest.impl.pipeline.scm.ScmContentProviderParams; import io.jenkins.blueocean.rest.impl.pipeline.scm.ScmFile; import io.jenkins.blueocean.rest.model.BlueOrganization; import jenkins.branch.MultiBranchProject; import jenkins.scm.api.SCMNavigator; import jenkins.scm.api.SCMSource; import net.sf.json.JSONObject; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.StaplerRequest; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.List; import java.util.Objects; /** * @author Vivek Pandey */ public abstract class AbstractBitbucketScmContentProvider extends AbstractScmContentProvider { @Override protected Object getContent(ScmGetRequest request) { BitbucketApi api = BitbucketServerScm.getApi(request.getApiUrl(), 
this.getScmId(), request.getCredentials()); BbBranch branch=null; String branchName = request.getBranch(); BbBranch defaultBranch = api.getDefaultBranch(request.getOwner(), request.getRepo()); if(defaultBranch == null){ //empty repo throw new ServiceException.NotFoundException(request.getPath()+ " not found. This is empty and un-initialized repository"); } if(branchName == null){ branch = defaultBranch; } if(branchName != null){ branch = api.getBranch(request.getOwner(), request.getRepo(), branchName); //Given branchName create this branch if(branch == null ){ throw new ServiceException.BadRequestException("branch: "+branchName + " not found"); } } String content = api.getContent(request.getOwner(), request.getRepo(), request.getPath(), branch.getLatestCommit()); final GitContent gitContent = new GitContent.Builder() .base64Data(Base64.getEncoder().encodeToString(content.getBytes( StandardCharsets.UTF_8))) .branch(request.getBranch()) .size(content.length()) .path(request.getPath()) .owner(request.getOwner()) .repo(request.getRepo()) .name(request.getPath()) //we use commitId as sha value - bitbucket doesn't use content sha to detect collision .sha(branch.getLatestCommit()) .commitId(branch.getLatestCommit()) .build(); return new ScmFile<GitContent>() { @Override public GitContent getContent() { return gitContent; } }; } @Override protected ScmContentProviderParams getScmParamsFromItem(Item item) { return new BitbucketScmParams(item); } @Override public Object saveContent(@NonNull StaplerRequest staplerRequest, @NonNull Item item) { JSONObject body; try { body = JSONObject.fromObject(IOUtils.toString(staplerRequest.getReader())); } catch (IOException e) { throw new ServiceException.UnexpectedErrorException("Failed to read request body"); } BitbucketScmSaveFileRequest scmSaveFileRequest = staplerRequest.bindJSON(BitbucketScmSaveFileRequest.class, body); if(scmSaveFileRequest == null){ throw new ServiceException.BadRequestException(new ErrorMessage(400, "Failed to 
bind request")); } GitContent gitContent = scmSaveFileRequest.getContent(); BitbucketScmParams scmParamsFromItem = new BitbucketScmParams(item); String owner = scmParamsFromItem.getOwner(); String repo = scmParamsFromItem.getRepo(); String commitId = StringUtils.isNotBlank(gitContent.getCommitId()) ? gitContent.getCommitId() : gitContent.getSha(); BitbucketApi api = BitbucketServerScm.getApi(scmParamsFromItem.getApiUrl(), this.getScmId(), scmParamsFromItem.getCredentials()); String content = new String(Base64.getDecoder().decode(gitContent.getBase64Data()), StandardCharsets.UTF_8); String message = gitContent.getMessage(); if(message == null){ message = gitContent.getPath()+" created with BlueOcean"; } BbSaveContentResponse response = api.saveContent(owner,repo,gitContent.getPath(),content, message, gitContent.getBranch(), gitContent.getSourceBranch(), commitId); final GitContent respContent = new GitContent.Builder() .branch(gitContent.getBranch()) .path(gitContent.getPath()) .owner(gitContent.getOwner()) .repo(gitContent.getRepo()) .sha(response.getCommitId()) .name(gitContent.getPath()) .commitId(response.getCommitId()) .build(); return new ScmFile<GitContent>() { @Override public GitContent getContent() { return respContent; } }; } @SuppressWarnings("unchecked") @CheckForNull protected BitbucketSCMSource getSourceFromItem(@NonNull Item item) { if (item instanceof MultiBranchProject) { List<SCMSource> sources = ((MultiBranchProject) item).getSCMSources(); if (!sources.isEmpty() && sources.get(0) instanceof BitbucketSCMSource) { return (BitbucketSCMSource) sources.get(0); } } return null; } static class BitbucketScmParams extends ScmContentProviderParams { public BitbucketScmParams(Item item) { super(item); } @Override protected String owner(@NonNull SCMSource scmSource) { if (scmSource instanceof BitbucketSCMSource) { BitbucketSCMSource bitbucketSCMSource = (BitbucketSCMSource) scmSource; return bitbucketSCMSource.getRepoOwner(); } return null; } @Override 
protected String owner(@NonNull SCMNavigator scmNavigator) {
    // Content operations are source-based for Bitbucket; an owner cannot be derived from a navigator.
    return null;
}

@Override
protected String repo(@NonNull SCMSource scmSource) {
    // Repository slug is only known for Bitbucket-backed sources.
    return (scmSource instanceof BitbucketSCMSource)
            ? ((BitbucketSCMSource) scmSource).getRepository()
            : null;
}

@Override
protected String apiUrl(@NonNull SCMSource scmSource) {
    // Server URL doubles as the API endpoint for Bitbucket-backed sources.
    return (scmSource instanceof BitbucketSCMSource)
            ? ((BitbucketSCMSource) scmSource).getServerUrl()
            : null;
}

@Override
protected String apiUrl(@NonNull SCMNavigator scmNavigator) {
    // Not derivable from a navigator; see apiUrl(SCMSource).
    return null;
}

/**
 * Resolves the Bitbucket credential stored for the currently authenticated user.
 *
 * @param item   the Jenkins item whose containing organization scopes the credential lookup
 * @param apiUrl Bitbucket API endpoint; normalized before comparison and lookup
 * @return the user's username/password credential for the endpoint (never null)
 * @throws ServiceException.UnauthorizedException   when no user is logged in
 * @throws ServiceException.PreconditionRequired    when no credential is stored for the endpoint
 */
@Override
@NonNull
protected StandardUsernamePasswordCredentials getCredentialForUser(@NonNull final Item item, @NonNull String apiUrl) {
    User currentUser = User.current();
    if (currentUser == null) {
        // Ensure this session has an authenticated user.
        throw new ServiceException.UnauthorizedException("No logged in user found");
    }
    StaplerRequest currentRequest = Stapler.getCurrentRequest();
    // NOTE(review): assumes a Stapler request is bound to the current thread — confirm all callers.
    String scmId = currentRequest.getParameter("scmId");
    final BlueOrganization organization = OrganizationFactory.getInstance().getContainingOrg(item);
    String normalizedApiUrl = BitbucketEndpointConfiguration.normalizeServerUrl(apiUrl);
    // Tests may pass an explicit scmId to force the cloud SCM, because apiUrl might point
    // at a WireMock server rather than the real Bitbucket cloud endpoint.
    boolean isCloud = normalizedApiUrl
            .startsWith(BitbucketEndpointConfiguration.normalizeServerUrl(BitbucketCloudScm.API_URL))
            || (StringUtils.isNotBlank(scmId) && scmId.equals(BitbucketCloudScm.ID));
    AbstractBitbucketScm scm;
    if (isCloud) {
        scm = new BitbucketCloudScm(() -> {
            Objects.requireNonNull(organization);
            return organization.getLink().rel("scm");
        });
    } else {
        // Self-hosted Bitbucket server.
        scm = new BitbucketServerScm(() -> {
            Objects.requireNonNull(organization);
            return organization.getLink().rel("scm");
        });
    }
    // Pick up the Bitbucket credential from the user's credential store.
    StandardUsernamePasswordCredentials credential = scm.getCredential(normalizedApiUrl);
    if (credential == null) {
        throw new ServiceException.PreconditionRequired("Can't access content from Bitbucket: no credential found");
    }
    return credential;
}
}
}
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.openqa.selenium.support.ui.ExpectedConditions.titleIs; import static org.openqa.selenium.testing.Driver.CHROME; import static org.openqa.selenium.testing.Driver.HTMLUNIT; import static org.openqa.selenium.testing.Driver.IE; import static org.openqa.selenium.testing.Driver.MARIONETTE; import static org.openqa.selenium.testing.Driver.PHANTOMJS; import static org.openqa.selenium.testing.Driver.SAFARI; import org.junit.Test; import org.openqa.selenium.interactions.MoveTargetOutOfBoundsException; import org.openqa.selenium.testing.Ignore; import org.openqa.selenium.testing.JUnit4TestBase; import org.openqa.selenium.testing.JavascriptEnabled; import org.openqa.selenium.testing.SwitchToTopAfterTest; @Ignore(value = {HTMLUNIT}, reason = "HtmlUnit: Scrolling requires rendering") public class ClickScrollingTest extends JUnit4TestBase { @JavascriptEnabled @Test public void 
testClickingOnAnchorScrollsPage() { String scrollScript = ""; scrollScript += "var pageY;"; scrollScript += "if (typeof(window.pageYOffset) == 'number') {"; scrollScript += " pageY = window.pageYOffset;"; scrollScript += "} else {"; scrollScript += " pageY = document.documentElement.scrollTop;"; scrollScript += "}"; scrollScript += "return pageY;"; driver.get(pages.macbethPage); driver.findElement(By.partialLinkText("last speech")).click(); long yOffset = (Long) ((JavascriptExecutor) driver) .executeScript(scrollScript); // Focusing on to click, but not actually following, // the link will scroll it in to view, which is a few pixels further than 0 assertThat("Did not scroll", yOffset, is(greaterThan(300L))); } @Test public void testShouldScrollToClickOnAnElementHiddenByOverflow() { String url = appServer.whereIs("click_out_of_bounds_overflow.html"); driver.get(url); WebElement link = driver.findElement(By.id("link")); try { link.click(); } catch (MoveTargetOutOfBoundsException e) { fail("Should not be out of bounds: " + e.getMessage()); } } @Test @Ignore(MARIONETTE) public void testShouldBeAbleToClickOnAnElementHiddenByOverflow() { driver.get(appServer.whereIs("scroll.html")); WebElement link = driver.findElement(By.id("line8")); // This used to throw a MoveTargetOutOfBoundsException - we don't expect it to link.click(); assertEquals("line8", driver.findElement(By.id("clicked")).getText()); } @JavascriptEnabled @Ignore(value = {CHROME}, reason = "Chrome: failed") @Test public void testShouldBeAbleToClickOnAnElementHiddenByDoubleOverflow() { driver.get(appServer.whereIs("scrolling_tests/page_with_double_overflow_auto.html")); driver.findElement(By.id("link")).click(); wait.until(titleIs("Clicked Successfully!")); } @JavascriptEnabled @Ignore(value = {SAFARI}, reason = "Safari: failed") @Test public void testShouldBeAbleToClickOnAnElementHiddenByYOverflow() { driver.get(appServer.whereIs("scrolling_tests/page_with_y_overflow_auto.html")); 
driver.findElement(By.id("link")).click(); wait.until(titleIs("Clicked Successfully!")); } @JavascriptEnabled @Test public void testShouldNotScrollOverflowElementsWhichAreVisible() { driver.get(appServer.whereIs("scroll2.html")); WebElement list = driver.findElement(By.tagName("ul")); WebElement item = list.findElement(By.id("desired")); item.click(); long yOffset = (Long)((JavascriptExecutor)driver).executeScript("return arguments[0].scrollTop;", list); assertEquals("Should not have scrolled", 0, yOffset); } @JavascriptEnabled @Ignore(value = {CHROME, PHANTOMJS, SAFARI, MARIONETTE}, reason = "Safari: button1 is scrolled to the bottom edge of the view, " + "so additonal scrolling is still required for button2") @Test public void testShouldNotScrollIfAlreadyScrolledAndElementIsInView() { driver.get(appServer.whereIs("scroll3.html")); driver.findElement(By.id("button1")).click(); long scrollTop = getScrollTop(); driver.findElement(By.id("button2")).click(); assertEquals(scrollTop, getScrollTop()); } @Test public void testShouldBeAbleToClickRadioButtonScrolledIntoView() { driver.get(appServer.whereIs("scroll4.html")); driver.findElement(By.id("radio")).click(); // If we don't throw, we're good } @Ignore(value = {IE, MARIONETTE}, reason = "IE has special overflow handling") @Test public void testShouldScrollOverflowElementsIfClickPointIsOutOfViewButElementIsInView() { driver.get(appServer.whereIs("scroll5.html")); driver.findElement(By.id("inner")).click(); assertEquals("clicked", driver.findElement(By.id("clicked")).getText()); } @SwitchToTopAfterTest @Test @Ignore(value = {SAFARI, MARIONETTE}, reason = "others: not tested") public void testShouldBeAbleToClickElementInAFrameThatIsOutOfView() { driver.get(appServer.whereIs("scrolling_tests/page_with_frame_out_of_view.html")); driver.switchTo().frame("frame"); WebElement element = driver.findElement(By.name("checkbox")); element.click(); assertTrue(element.isSelected()); } @SwitchToTopAfterTest @Test @Ignore(value = 
{SAFARI}, reason = "not tested") public void testShouldBeAbleToClickElementThatIsOutOfViewInAFrame() { driver.get(appServer.whereIs("scrolling_tests/page_with_scrolling_frame.html")); driver.switchTo().frame("scrolling_frame"); WebElement element = driver.findElement(By.name("scroll_checkbox")); element.click(); assertTrue(element.isSelected()); } @SwitchToTopAfterTest @Test(expected = MoveTargetOutOfBoundsException.class) @Ignore(reason = "All tested browses scroll non-scrollable frames") public void testShouldNotBeAbleToClickElementThatIsOutOfViewInANonScrollableFrame() { driver.get(appServer.whereIs("scrolling_tests/page_with_non_scrolling_frame.html")); driver.switchTo().frame("scrolling_frame"); WebElement element = driver.findElement(By.name("scroll_checkbox")); element.click(); } @SwitchToTopAfterTest @Test @Ignore(value = {SAFARI}, reason = "not tested") public void testShouldBeAbleToClickElementThatIsOutOfViewInAFrameThatIsOutOfView() { driver.get(appServer.whereIs("scrolling_tests/page_with_scrolling_frame_out_of_view.html")); driver.switchTo().frame("scrolling_frame"); WebElement element = driver.findElement(By.name("scroll_checkbox")); element.click(); assertTrue(element.isSelected()); } @SwitchToTopAfterTest @Test @Ignore(value = {SAFARI}, reason = "not tested") public void testShouldBeAbleToClickElementThatIsOutOfViewInANestedFrame() { driver.get(appServer.whereIs("scrolling_tests/page_with_nested_scrolling_frames.html")); driver.switchTo().frame("scrolling_frame"); driver.switchTo().frame("nested_scrolling_frame"); WebElement element = driver.findElement(By.name("scroll_checkbox")); element.click(); assertTrue(element.isSelected()); } @SwitchToTopAfterTest @Test @Ignore(value = {SAFARI}, reason = "not tested") public void testShouldBeAbleToClickElementThatIsOutOfViewInANestedFrameThatIsOutOfView() { driver.get(appServer.whereIs("scrolling_tests/page_with_nested_scrolling_frames_out_of_view.html")); driver.switchTo().frame("scrolling_frame"); 
driver.switchTo().frame("nested_scrolling_frame"); WebElement element = driver.findElement(By.name("scroll_checkbox")); element.click(); assertTrue(element.isSelected()); } @JavascriptEnabled @Test public void testShouldNotScrollWhenGettingElementSize() { driver.get(appServer.whereIs("scroll3.html")); long scrollTop = getScrollTop(); driver.findElement(By.id("button1")).getSize(); assertEquals(scrollTop, getScrollTop()); } private long getScrollTop() { return (Long)((JavascriptExecutor)driver).executeScript("return document.body.scrollTop;"); } @SwitchToTopAfterTest @Test @Ignore(value = {SAFARI, MARIONETTE}, reason = "Not tested") public void testShouldBeAbleToClickElementInATallFrame() { driver.get(appServer.whereIs("scrolling_tests/page_with_tall_frame.html")); driver.switchTo().frame("tall_frame"); WebElement element = driver.findElement(By.name("checkbox")); element.click(); assertTrue(element.isSelected()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.jms;

import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import javax.jms.ConnectionFactory;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.TypeConversionException;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.TypeConverterSupport;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;

import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
import static org.apache.camel.component.jms.JmsConstants.JMS_MESSAGE_TYPE;

/**
 * Tests that the {@code jmsMessageType} endpoint option and the
 * {@link JmsConstants#JMS_MESSAGE_TYPE} header force the producer to send the
 * body as a Text, Bytes, Map or Object JMS message, converting the body via
 * the registered type converters where needed.
 *
 * @version
 */
public class JmsMessageTypeTest extends CamelTestSupport {

    /**
     * Creates the test context with an auto-acknowledge JMS component and
     * registers {@link MyFooBean} converters for the target body types.
     */
    @Override
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();
        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        camelContext.addComponent("jms", jmsComponentAutoAcknowledge(connectionFactory));
        // MyFooBean converts itself to each of the body types a JMS message type may require
        camelContext.getTypeConverterRegistry().addTypeConverter(byte[].class, MyFooBean.class, new MyFooBean());
        camelContext.getTypeConverterRegistry().addTypeConverter(String.class, MyFooBean.class, new MyFooBean());
        camelContext.getTypeConverterRegistry().addTypeConverter(Map.class, MyFooBean.class, new MyFooBean());
        return camelContext;
    }

    @Test
    public void testHeaderTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);

        // we send an object and force it to use Text type
        template.sendBodyAndHeader("direct:foo", new MyFooBean("World"), JMS_MESSAGE_TYPE, "Text");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testConvertTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);

        // we send an object and force it to use Text type
        template.sendBody("direct:text", new MyFooBean("World"));

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testTextType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Hello World");
        // we use Text type then it should be a String
        mock.message(0).body().isInstanceOf(String.class);

        // we send a string and force it to use Text type
        template.sendBody("direct:text", "Hello World");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testHeaderBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);

        // we send an object and force it to use Bytes type
        template.sendBodyAndHeader("direct:foo", new MyFooBean("World"), JMS_MESSAGE_TYPE, "Bytes");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testConvertBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);

        // we send an object and force it to use Bytes type
        template.sendBody("direct:bytes", new MyFooBean("World"));

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testBytesType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedBodiesReceived("Bye World".getBytes());
        mock.message(0).body().isInstanceOf(byte[].class);

        // we send a string and force it to use Bytes type
        template.sendBody("direct:bytes", "Bye World");

        assertMockEndpointsSatisfied();
    }

    @Test
    public void testHeaderMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);

        // we send an object and force it to use Map type
        template.sendBodyAndHeader("direct:foo", new MyFooBean("Claus"), JMS_MESSAGE_TYPE, "Map");

        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }

    @Test
    public void testConvertMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);

        // we send an object and force it to use Map type
        template.sendBody("direct:map", new MyFooBean("Claus"));

        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }

    @Test
    public void testMapType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        mock.message(0).body().isInstanceOf(Map.class);

        Map<String, Object> body = new HashMap<String, Object>();
        body.put("name", "Claus");

        // we send a Map object and force it to use Map type
        template.sendBody("direct:map", body);

        assertMockEndpointsSatisfied();
        assertEquals("Claus", mock.getExchanges().get(0).getIn().getBody(Map.class).get("name"));
    }

    @Test
    public void testHeaderObjectType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        // we use Object type then it should be a MyFooBean object
        mock.message(0).body().isInstanceOf(MyFooBean.class);

        // we send an object and force it to use Object type
        template.sendBodyAndHeader("direct:foo", new MyFooBean("James"), JMS_MESSAGE_TYPE, "Object");

        assertMockEndpointsSatisfied();
        assertEquals("James", mock.getExchanges().get(0).getIn().getBody(MyFooBean.class).getName());
    }

    @Test
    public void testObjectType() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);
        // we use Object type then it should be a MyFooBean object
        mock.message(0).body().isInstanceOf(MyFooBean.class);

        // we send an object and force it to use Object type
        template.sendBody("direct:object", new MyFooBean("James"));

        assertMockEndpointsSatisfied();
        assertEquals("James", mock.getExchanges().get(0).getIn().getBody(MyFooBean.class).getName());
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // one producer endpoint per forced JMS message type
                from("direct:text").to("jms:queue:foo?jmsMessageType=Text");
                from("direct:bytes").to("jms:queue:foo?jmsMessageType=Bytes");
                from("direct:map").to("jms:queue:foo?jmsMessageType=Map");
                from("direct:object").to("jms:queue:foo?jmsMessageType=Object");
                // no forced type; the JMS_MESSAGE_TYPE header decides
                from("direct:foo").to("jms:queue:foo");
                from("jms:queue:foo").to("mock:result");
            }
        };
    }

    /**
     * Test payload that also acts as its own type converter, so the JMS
     * producer can convert it to String, byte[] or Map on demand.
     */
    public static final class MyFooBean extends TypeConverterSupport implements Serializable {
        private static final long serialVersionUID = 1L;

        private String name;

        private MyFooBean() {
        }

        private MyFooBean(String name) {
            this.name = name;
        }

        public String getName() {
            return name;
        }

        /**
         * Converts a {@code MyFooBean} value to the requested target type,
         * or returns {@code null} when the target type is not supported.
         */
        @Override
        @SuppressWarnings("unchecked")
        public <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException {
            MyFooBean foo = (MyFooBean) value;
            if (type.isAssignableFrom(String.class)) {
                return (T) ("Hello " + foo.getName());
            }
            if (type.isAssignableFrom(byte[].class)) {
                return (T) ("Bye " + foo.getName()).getBytes();
            }
            if (type.isAssignableFrom(Map.class)) {
                Map<String, Object> map = new HashMap<String, Object>();
                map.put("name", foo.getName());
                return (T) map;
            }
            return null;
        }
    }
}
package org.openestate.is24.restapi.xml.common; import java.io.Serializable; import java.net.URI; import java.util.ArrayList; import java.util.List; import javax.annotation.Generated; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import org.jvnet.jaxb2_commons.lang.CopyStrategy2; import org.jvnet.jaxb2_commons.lang.CopyTo2; import org.jvnet.jaxb2_commons.lang.Equals2; import org.jvnet.jaxb2_commons.lang.EqualsStrategy2; import org.jvnet.jaxb2_commons.lang.JAXBCopyStrategy; import org.jvnet.jaxb2_commons.lang.JAXBEqualsStrategy; import org.jvnet.jaxb2_commons.lang.JAXBToStringStrategy; import org.jvnet.jaxb2_commons.lang.ToString2; import org.jvnet.jaxb2_commons.lang.ToStringStrategy2; import org.jvnet.jaxb2_commons.locator.ObjectLocator; import org.jvnet.jaxb2_commons.locator.util.LocatorUtils; import org.openestate.is24.restapi.xml.Adapter4; import org.openestate.is24.restapi.xml.Adapter5; /** * streaming video urls von screen9 * * <p>Java class for VideoInfo complex type. * * <p>The following schema fragment specifies the expected content contained within this class. 
* * <pre> * &lt;complexType name="VideoInfo"&gt; * &lt;complexContent&gt; * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt; * &lt;sequence&gt; * &lt;element name="videoUrlList" type="{http://rest.immobilienscout24.de/schema/common/1.0}videoUrlList" maxOccurs="2"/&gt; * &lt;element name="duration" type="{http://www.w3.org/2001/XMLSchema}int" minOccurs="0"/&gt; * &lt;element name="teaserUrl" type="{http://www.w3.org/2001/XMLSchema}anyURI" minOccurs="0"/&gt; * &lt;element name="title" type="{http://rest.immobilienscout24.de/schema/common/1.0}TextField" minOccurs="0"/&gt; * &lt;element name="processingProgress" type="{http://rest.immobilienscout24.de/schema/common/1.0}percentType" minOccurs="0"/&gt; * &lt;element name="processingStatus" type="{http://rest.immobilienscout24.de/schema/common/1.0}videoProcessingStatusType" minOccurs="0"/&gt; * &lt;/sequence&gt; * &lt;/restriction&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "VideoInfo", propOrder = { "videoUrlList", "duration", "teaserUrl", "title", "processingProgress", "processingStatus" }) @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public class VideoInfo implements Serializable, Cloneable, CopyTo2, Equals2, ToString2 { @XmlElement(required = true) @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected List<VideoUrlList> videoUrlList; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter5 .class) @XmlSchemaType(name = "int") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected Long duration; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter4 .class) @XmlSchemaType(name = "anyURI") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected URI 
teaserUrl; @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected String title; @XmlElement(type = String.class) @XmlJavaTypeAdapter(Adapter3 .class) @XmlSchemaType(name = "int") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected Integer processingProgress; @XmlSchemaType(name = "string") @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") protected VideoProcessingStatusType processingStatus; /** * Gets the value of the videoUrlList property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the videoUrlList property. * * <p> * For example, to add a new item, do as follows: * <pre> * getVideoUrlList().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link VideoUrlList } * * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public List<VideoUrlList> getVideoUrlList() { if (videoUrlList == null) { videoUrlList = new ArrayList<VideoUrlList>(); } return this.videoUrlList; } /** * Gets the value of the duration property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Long getDuration() { return duration; } /** * Sets the value of the duration property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public void setDuration(Long value) { this.duration = value; } /** * Gets the value of the teaserUrl property. 
* * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public URI getTeaserUrl() { return teaserUrl; } /** * Sets the value of the teaserUrl property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public void setTeaserUrl(URI value) { this.teaserUrl = value; } /** * Gets the value of the title property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public String getTitle() { return title; } /** * Sets the value of the title property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public void setTitle(String value) { this.title = value; } /** * Gets the value of the processingProgress property. * * @return * possible object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Integer getProcessingProgress() { return processingProgress; } /** * Sets the value of the processingProgress property. * * @param value * allowed object is * {@link String } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public void setProcessingProgress(Integer value) { this.processingProgress = value; } /** * Gets the value of the processingStatus property. 
* * @return * possible object is * {@link VideoProcessingStatusType } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public VideoProcessingStatusType getProcessingStatus() { return processingStatus; } /** * Sets the value of the processingStatus property. * * @param value * allowed object is * {@link VideoProcessingStatusType } * */ @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public void setProcessingStatus(VideoProcessingStatusType value) { this.processingStatus = value; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public String toString() { final ToStringStrategy2 strategy = JAXBToStringStrategy.INSTANCE2; final StringBuilder buffer = new StringBuilder(); append(null, buffer, strategy); return buffer.toString(); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public StringBuilder append(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) { strategy.appendStart(locator, this, buffer); appendFields(locator, buffer, strategy); strategy.appendEnd(locator, this, buffer); return buffer; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public StringBuilder appendFields(ObjectLocator locator, StringBuilder buffer, ToStringStrategy2 strategy) { { List<VideoUrlList> theVideoUrlList; theVideoUrlList = (((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty()))?this.getVideoUrlList():null); strategy.appendField(locator, this, "videoUrlList", buffer, theVideoUrlList, ((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty()))); } { Long theDuration; theDuration = this.getDuration(); strategy.appendField(locator, this, "duration", buffer, theDuration, (this.duration!= null)); } { URI theTeaserUrl; theTeaserUrl = 
this.getTeaserUrl(); strategy.appendField(locator, this, "teaserUrl", buffer, theTeaserUrl, (this.teaserUrl!= null)); } { String theTitle; theTitle = this.getTitle(); strategy.appendField(locator, this, "title", buffer, theTitle, (this.title!= null)); } { Integer theProcessingProgress; theProcessingProgress = this.getProcessingProgress(); strategy.appendField(locator, this, "processingProgress", buffer, theProcessingProgress, (this.processingProgress!= null)); } { VideoProcessingStatusType theProcessingStatus; theProcessingStatus = this.getProcessingStatus(); strategy.appendField(locator, this, "processingStatus", buffer, theProcessingStatus, (this.processingStatus!= null)); } return buffer; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Object clone() { return copyTo(createNewInstance()); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Object copyTo(Object target) { final CopyStrategy2 strategy = JAXBCopyStrategy.INSTANCE2; return copyTo(null, target, strategy); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Object copyTo(ObjectLocator locator, Object target, CopyStrategy2 strategy) { final Object draftCopy = ((target == null)?createNewInstance():target); if (draftCopy instanceof VideoInfo) { final VideoInfo copy = ((VideoInfo) draftCopy); { Boolean videoUrlListShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, ((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty()))); if (videoUrlListShouldBeCopiedAndSet == Boolean.TRUE) { List<VideoUrlList> sourceVideoUrlList; sourceVideoUrlList = (((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty()))?this.getVideoUrlList():null); @SuppressWarnings("unchecked") List<VideoUrlList> copyVideoUrlList = ((List<VideoUrlList> ) strategy.copy(LocatorUtils.property(locator, "videoUrlList", 
sourceVideoUrlList), sourceVideoUrlList, ((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty())))); copy.videoUrlList = null; if (copyVideoUrlList!= null) { List<VideoUrlList> uniqueVideoUrlListl = copy.getVideoUrlList(); uniqueVideoUrlListl.addAll(copyVideoUrlList); } } else { if (videoUrlListShouldBeCopiedAndSet == Boolean.FALSE) { copy.videoUrlList = null; } } } { Boolean durationShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.duration!= null)); if (durationShouldBeCopiedAndSet == Boolean.TRUE) { Long sourceDuration; sourceDuration = this.getDuration(); Long copyDuration = ((Long) strategy.copy(LocatorUtils.property(locator, "duration", sourceDuration), sourceDuration, (this.duration!= null))); copy.setDuration(copyDuration); } else { if (durationShouldBeCopiedAndSet == Boolean.FALSE) { copy.duration = null; } } } { Boolean teaserUrlShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.teaserUrl!= null)); if (teaserUrlShouldBeCopiedAndSet == Boolean.TRUE) { URI sourceTeaserUrl; sourceTeaserUrl = this.getTeaserUrl(); URI copyTeaserUrl = ((URI) strategy.copy(LocatorUtils.property(locator, "teaserUrl", sourceTeaserUrl), sourceTeaserUrl, (this.teaserUrl!= null))); copy.setTeaserUrl(copyTeaserUrl); } else { if (teaserUrlShouldBeCopiedAndSet == Boolean.FALSE) { copy.teaserUrl = null; } } } { Boolean titleShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.title!= null)); if (titleShouldBeCopiedAndSet == Boolean.TRUE) { String sourceTitle; sourceTitle = this.getTitle(); String copyTitle = ((String) strategy.copy(LocatorUtils.property(locator, "title", sourceTitle), sourceTitle, (this.title!= null))); copy.setTitle(copyTitle); } else { if (titleShouldBeCopiedAndSet == Boolean.FALSE) { copy.title = null; } } } { Boolean processingProgressShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.processingProgress!= null)); if (processingProgressShouldBeCopiedAndSet == Boolean.TRUE) { Integer 
sourceProcessingProgress; sourceProcessingProgress = this.getProcessingProgress(); Integer copyProcessingProgress = ((Integer) strategy.copy(LocatorUtils.property(locator, "processingProgress", sourceProcessingProgress), sourceProcessingProgress, (this.processingProgress!= null))); copy.setProcessingProgress(copyProcessingProgress); } else { if (processingProgressShouldBeCopiedAndSet == Boolean.FALSE) { copy.processingProgress = null; } } } { Boolean processingStatusShouldBeCopiedAndSet = strategy.shouldBeCopiedAndSet(locator, (this.processingStatus!= null)); if (processingStatusShouldBeCopiedAndSet == Boolean.TRUE) { VideoProcessingStatusType sourceProcessingStatus; sourceProcessingStatus = this.getProcessingStatus(); VideoProcessingStatusType copyProcessingStatus = ((VideoProcessingStatusType) strategy.copy(LocatorUtils.property(locator, "processingStatus", sourceProcessingStatus), sourceProcessingStatus, (this.processingStatus!= null))); copy.setProcessingStatus(copyProcessingStatus); } else { if (processingStatusShouldBeCopiedAndSet == Boolean.FALSE) { copy.processingStatus = null; } } } } return draftCopy; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public Object createNewInstance() { return new VideoInfo(); } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public boolean equals(ObjectLocator thisLocator, ObjectLocator thatLocator, Object object, EqualsStrategy2 strategy) { if ((object == null)||(this.getClass()!= object.getClass())) { return false; } if (this == object) { return true; } final VideoInfo that = ((VideoInfo) object); { List<VideoUrlList> lhsVideoUrlList; lhsVideoUrlList = (((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty()))?this.getVideoUrlList():null); List<VideoUrlList> rhsVideoUrlList; rhsVideoUrlList = (((that.videoUrlList!= null)&&(!that.videoUrlList.isEmpty()))?that.getVideoUrlList():null); if 
(!strategy.equals(LocatorUtils.property(thisLocator, "videoUrlList", lhsVideoUrlList), LocatorUtils.property(thatLocator, "videoUrlList", rhsVideoUrlList), lhsVideoUrlList, rhsVideoUrlList, ((this.videoUrlList!= null)&&(!this.videoUrlList.isEmpty())), ((that.videoUrlList!= null)&&(!that.videoUrlList.isEmpty())))) { return false; } } { Long lhsDuration; lhsDuration = this.getDuration(); Long rhsDuration; rhsDuration = that.getDuration(); if (!strategy.equals(LocatorUtils.property(thisLocator, "duration", lhsDuration), LocatorUtils.property(thatLocator, "duration", rhsDuration), lhsDuration, rhsDuration, (this.duration!= null), (that.duration!= null))) { return false; } } { URI lhsTeaserUrl; lhsTeaserUrl = this.getTeaserUrl(); URI rhsTeaserUrl; rhsTeaserUrl = that.getTeaserUrl(); if (!strategy.equals(LocatorUtils.property(thisLocator, "teaserUrl", lhsTeaserUrl), LocatorUtils.property(thatLocator, "teaserUrl", rhsTeaserUrl), lhsTeaserUrl, rhsTeaserUrl, (this.teaserUrl!= null), (that.teaserUrl!= null))) { return false; } } { String lhsTitle; lhsTitle = this.getTitle(); String rhsTitle; rhsTitle = that.getTitle(); if (!strategy.equals(LocatorUtils.property(thisLocator, "title", lhsTitle), LocatorUtils.property(thatLocator, "title", rhsTitle), lhsTitle, rhsTitle, (this.title!= null), (that.title!= null))) { return false; } } { Integer lhsProcessingProgress; lhsProcessingProgress = this.getProcessingProgress(); Integer rhsProcessingProgress; rhsProcessingProgress = that.getProcessingProgress(); if (!strategy.equals(LocatorUtils.property(thisLocator, "processingProgress", lhsProcessingProgress), LocatorUtils.property(thatLocator, "processingProgress", rhsProcessingProgress), lhsProcessingProgress, rhsProcessingProgress, (this.processingProgress!= null), (that.processingProgress!= null))) { return false; } } { VideoProcessingStatusType lhsProcessingStatus; lhsProcessingStatus = this.getProcessingStatus(); VideoProcessingStatusType rhsProcessingStatus; rhsProcessingStatus = 
that.getProcessingStatus(); if (!strategy.equals(LocatorUtils.property(thisLocator, "processingStatus", lhsProcessingStatus), LocatorUtils.property(thatLocator, "processingStatus", rhsProcessingStatus), lhsProcessingStatus, rhsProcessingStatus, (this.processingStatus!= null), (that.processingStatus!= null))) { return false; } } return true; } @Generated(value = "com.sun.tools.xjc.Driver", date = "2021-08-07T09:44:49+02:00", comments = "JAXB RI v2.3.0") public boolean equals(Object object) { final EqualsStrategy2 strategy = JAXBEqualsStrategy.INSTANCE2; return equals(null, null, object, strategy); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.driver.ser;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.ReferenceCountUtil;
import org.apache.tinkerpop.gremlin.driver.message.RequestMessage;
import org.apache.tinkerpop.gremlin.driver.message.ResponseMessage;
import org.apache.tinkerpop.gremlin.driver.message.ResponseStatusCode;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoIo;
import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoMapper;
import org.apache.tinkerpop.shaded.kryo.ClassResolver;
import org.apache.tinkerpop.shaded.kryo.Kryo;
import org.apache.tinkerpop.shaded.kryo.Serializer;
import org.apache.tinkerpop.shaded.kryo.io.Input;
import org.apache.tinkerpop.shaded.kryo.io.Output;

import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Supplier;
import java.util.stream.Collectors;

/**
 * Base Gryo (Kryo-based) serializer for driver {@link RequestMessage} and {@link ResponseMessage}
 * instances. Subclasses supply the MIME types via {@code mimeTypesSupported()}; this class owns the
 * wire format and the {@link GryoMapper} configuration.
 *
 * @author Stephen Mallette (http://stephen.genoprime.com)
 */
public abstract class AbstractGryoMessageSerializerV1d0 extends AbstractMessageSerializer {
    private GryoMapper gryoMapper;

    // Kryo instances are not thread-safe, so each thread lazily creates its own from the mapper.
    // NOTE(review): threads that already initialized a Kryo keep it even if configure() later
    // replaces gryoMapper - same behavior as before, just documented.
    private final ThreadLocal<Kryo> kryoThreadLocal = new ThreadLocal<Kryo>() {
        @Override
        protected Kryo initialValue() {
            return gryoMapper.createMapper();
        }
    };

    private static final Charset UTF8 = StandardCharsets.UTF_8;

    public static final String TOKEN_CUSTOM = "custom";
    public static final String TOKEN_SERIALIZE_RESULT_TO_STRING = "serializeResultToString";
    public static final String TOKEN_USE_MAPPER_FROM_GRAPH = "useMapperFromGraph";
    public static final String TOKEN_BUFFER_SIZE = "bufferSize";
    public static final String TOKEN_CLASS_RESOLVER_SUPPLIER = "classResolverSupplier";

    protected boolean serializeToString = false;
    private int bufferSize = 4096;

    /**
     * Creates an instance with a provided mapper configured {@link GryoMapper} instance. Note that this instance
     * will be overridden if {@link #configure} is called.
     */
    public AbstractGryoMessageSerializerV1d0(final GryoMapper kryo) {
        this.gryoMapper = kryo;
    }

    /**
     * Called from the {@link #configure(Map, Map)} method right before the call to create the builder. Sub-classes
     * can choose to alter the builder or completely replace it.
     */
    GryoMapper.Builder configureBuilder(final GryoMapper.Builder builder, final Map<String, Object> config,
                                        final Map<String, Graph> graphs) {
        return builder;
    }

    /**
     * Builds the {@link GryoMapper} from the supplied configuration. If {@value #TOKEN_USE_MAPPER_FROM_GRAPH} is
     * present, the mapper is taken from the named graph (so graph-specific custom classes are auto-registered);
     * otherwise a default mapper is built. Registries, class resolver and custom classes from the config are then
     * applied, followed by {@link #configureBuilder}.
     *
     * @throws IllegalStateException if a graph mapper is requested but no graphs (or no matching graph) were provided
     */
    @Override
    public final void configure(final Map<String, Object> config, final Map<String, Graph> graphs) {
        final GryoMapper.Builder builder;
        final Object graphToUseForMapper = config.get(TOKEN_USE_MAPPER_FROM_GRAPH);
        if (graphToUseForMapper != null) {
            if (null == graphs) throw new IllegalStateException(String.format(
                    "No graphs have been provided to the serializer and therefore %s is not a valid configuration", TOKEN_USE_MAPPER_FROM_GRAPH));

            final Graph g = graphs.get(graphToUseForMapper.toString());
            if (null == g) throw new IllegalStateException(String.format(
                    "There is no graph named [%s] configured to be used in the %s setting",
                    graphToUseForMapper, TOKEN_USE_MAPPER_FROM_GRAPH));

            // a graph was found so use the mapper it constructs. this allows gryo to be auto-configured with any
            // custom classes that the implementation allows for
            builder = g.io(GryoIo.build()).mapper();
        } else {
            // no graph was supplied so just use the default - this will likely be the case when using a graph
            // with no custom classes or a situation where the user needs complete control like when using two
            // distinct implementations each with their own custom classes.
            builder = GryoMapper.build();
        }

        addIoRegistries(config, builder);
        addClassResolverSupplier(config, builder);
        addCustomClasses(config, builder);

        this.serializeToString = Boolean.parseBoolean(config.getOrDefault(TOKEN_SERIALIZE_RESULT_TO_STRING, "false").toString());
        this.bufferSize = Integer.parseInt(config.getOrDefault(TOKEN_BUFFER_SIZE, "4096").toString());
        this.gryoMapper = configureBuilder(builder, config, graphs).create();
    }

    /**
     * Installs a {@link ClassResolver} {@link Supplier} named by {@value #TOKEN_CLASS_RESOLVER_SUPPLIER}:
     * first tries a static {@code getInstance()} factory, then falls back to a no-arg constructor.
     */
    private void addClassResolverSupplier(final Map<String, Object> config, final GryoMapper.Builder builder) {
        final String className = (String) config.getOrDefault(TOKEN_CLASS_RESOLVER_SUPPLIER, null);
        if (className != null && !className.isEmpty()) {
            try {
                final Class<?> clazz = Class.forName(className);
                try {
                    final Method instanceMethod = clazz.getDeclaredMethod("getInstance");
                    builder.classResolver((Supplier<ClassResolver>) instanceMethod.invoke(null));
                } catch (Exception ignored) {
                    // tried getInstance() and that failed so try newInstance() no-arg constructor
                    builder.classResolver((Supplier<ClassResolver>) clazz.newInstance());
                }
            } catch (Exception ex) {
                throw new IllegalStateException(ex);
            }
        }
    }

    /**
     * Registers custom classes from {@value #TOKEN_CUSTOM}. Each entry is either {@code <class>} or
     * {@code <class>;<serializer-class>}.
     *
     * @throws IllegalStateException if an entry is malformed or a named class cannot be loaded
     */
    private void addCustomClasses(final Map<String, Object> config, final GryoMapper.Builder builder) {
        final List<String> classNameList = getListStringFromConfig(TOKEN_CUSTOM, config);

        classNameList.stream().forEach(serializerDefinition -> {
            String className;
            Optional<String> serializerName;
            if (serializerDefinition.contains(";")) {
                final String[] split = serializerDefinition.split(";");
                if (split.length != 2)
                    throw new IllegalStateException(String.format("Invalid format for serializer definition [%s] - expected <class>;<serializer-class>", serializerDefinition));

                className = split[0];
                serializerName = Optional.of(split[1]);
            } else {
                serializerName = Optional.empty();
                className = serializerDefinition;
            }

            try {
                final Class clazz = Class.forName(className);
                final Serializer serializer;
                if (serializerName.isPresent()) {
                    final Class serializerClazz = Class.forName(serializerName.get());
                    serializer = (Serializer) serializerClazz.newInstance();
                    builder.addCustom(clazz, kryo -> serializer);
                } else
                    builder.addCustom(clazz);
            } catch (Exception ex) {
                throw new IllegalStateException("Class could not be found", ex);
            }
        });
    }

    /**
     * Deserializes a binary {@link ResponseMessage}: request id, status code/message/attributes,
     * result data and result meta-attributes, in that order.
     *
     * @throws SerializationException wrapping any failure during deserialization
     */
    @Override
    public ResponseMessage deserializeResponse(final ByteBuf msg) throws SerializationException {
        try {
            final Kryo kryo = kryoThreadLocal.get();

            // FIX: size the payload by readableBytes() rather than capacity(). capacity() can be larger
            // than the written region of the buffer, which would copy unwritten bytes into the payload.
            // deserializeRequest() below already does it this way.
            final byte[] payload = new byte[msg.readableBytes()];
            msg.readBytes(payload);
            try (final Input input = new Input(payload)) {
                final UUID requestId = kryo.readObjectOrNull(input, UUID.class);
                final int status = input.readShort();
                final String statusMsg = input.readString();
                final Map<String, Object> statusAttributes = (Map<String, Object>) kryo.readClassAndObject(input);
                final Object result = kryo.readClassAndObject(input);
                final Map<String, Object> metaAttributes = (Map<String, Object>) kryo.readClassAndObject(input);

                return ResponseMessage.build(requestId)
                        .code(ResponseStatusCode.getFromValue(status))
                        .statusMessage(statusMsg)
                        .statusAttributes(statusAttributes)
                        .result(result)
                        .responseMetaData(metaAttributes)
                        .create();
            }
        } catch (Exception ex) {
            logger.warn("Response [{}] could not be deserialized by {}.", msg, GryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Serializes a {@link ResponseMessage} into the binary wire format. On failure the partially
     * allocated buffer is released before the exception is rethrown.
     *
     * @throws SerializationException wrapping any failure during serialization
     */
    @Override
    public ByteBuf serializeResponseAsBinary(final ResponseMessage responseMessage, final ByteBufAllocator allocator) throws SerializationException {
        ByteBuf encodedMessage = null;
        try {
            final Kryo kryo = kryoThreadLocal.get();
            try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
                final Output output = new Output(baos, bufferSize);

                // request id - writeObjectOrNull handles a null id directly (the old
                // "id != null ? id : null" ternary was a no-op)
                kryo.writeObjectOrNull(output, responseMessage.getRequestId(), UUID.class);

                // status
                output.writeShort(responseMessage.getStatus().getCode().getValue());
                output.writeString(responseMessage.getStatus().getMessage());
                kryo.writeClassAndObject(output, responseMessage.getStatus().getAttributes());

                // result
                kryo.writeClassAndObject(output, serializeToString ? serializeResultToString(responseMessage) : responseMessage.getResult().getData());
                kryo.writeClassAndObject(output, responseMessage.getResult().getMeta());

                final long size = output.total();
                if (size > Integer.MAX_VALUE)
                    throw new SerializationException(String.format("Message size of %s exceeds allocatable space", size));

                output.flush();
                encodedMessage = allocator.buffer((int) size);
                encodedMessage.writeBytes(baos.toByteArray());
            }

            return encodedMessage;
        } catch (Exception ex) {
            if (encodedMessage != null) ReferenceCountUtil.release(encodedMessage);

            logger.warn("Response [{}] could not be serialized by {}.", responseMessage.toString(), GryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Deserializes a binary {@link RequestMessage} payload (the mime length/type prefix has already
     * been consumed by the time the buffer gets here).
     *
     * @throws SerializationException wrapping any failure during deserialization
     */
    @Override
    public RequestMessage deserializeRequest(final ByteBuf msg) throws SerializationException {
        try {
            final Kryo kryo = kryoThreadLocal.get();
            final byte[] payload = new byte[msg.readableBytes()];
            msg.readBytes(payload);
            try (final Input input = new Input(payload)) {
                // by the time the message gets here, the mime length/type have been already read, so this part just
                // needs to process the payload.
                final UUID id = kryo.readObject(input, UUID.class);
                final String processor = input.readString();
                final String op = input.readString();

                final RequestMessage.Builder builder = RequestMessage.build(op)
                        .overrideRequestId(id)
                        .processor(processor);

                final Map<String, Object> args = kryo.readObject(input, HashMap.class);
                args.forEach(builder::addArg);
                return builder.create();
            }
        } catch (Exception ex) {
            logger.warn("Request [{}] could not be deserialized by {}.", msg, GryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Serializes a {@link RequestMessage}, prefixed by the one-byte length and bytes of the first
     * supported mime type. On failure the partially allocated buffer is released before rethrowing.
     *
     * @throws SerializationException wrapping any failure during serialization
     */
    @Override
    public ByteBuf serializeRequestAsBinary(final RequestMessage requestMessage, final ByteBufAllocator allocator) throws SerializationException {
        ByteBuf encodedMessage = null;
        try {
            final Kryo kryo = kryoThreadLocal.get();
            try (final ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
                final Output output = new Output(baos, bufferSize);
                final String mimeType = mimeTypesSupported()[0];
                output.writeByte(mimeType.length());
                output.write(mimeType.getBytes(UTF8));

                kryo.writeObject(output, requestMessage.getRequestId());
                output.writeString(requestMessage.getProcessor());
                output.writeString(requestMessage.getOp());
                kryo.writeObject(output, requestMessage.getArgs());

                final long size = output.total();
                if (size > Integer.MAX_VALUE)
                    throw new SerializationException(String.format("Message size of %s exceeds allocatable space", size));

                output.flush();
                encodedMessage = allocator.buffer((int) size);
                encodedMessage.writeBytes(baos.toByteArray());
            }

            return encodedMessage;
        } catch (Exception ex) {
            if (encodedMessage != null) ReferenceCountUtil.release(encodedMessage);

            logger.warn("Request [{}] could not be serialized by {}.", requestMessage.toString(), GryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Converts the result payload to its string form: {@code "null"} for absent results, a list of
     * per-element strings for collections, otherwise {@code toString()} of the single value.
     */
    private Object serializeResultToString(final ResponseMessage msg) {
        if (msg.getResult() == null) return "null";
        if (msg.getResult().getData() == null) return "null";

        // the IteratorHandler should return a collection so keep it as such
        final Object o = msg.getResult().getData();
        if (o instanceof Collection) {
            return ((Collection) o).stream().map(d -> null == d ? "null" : d.toString()).collect(Collectors.toList());
        } else {
            return o.toString();
        }
    }
}
/*
 * Copyright 2006-2009 Odysseus Software GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.engine.common.impl.javax.el;

import java.beans.FeatureDescriptor;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

/**
 * An {@link ELResolver} that delegates to an ordered list of component resolvers.
 *
 * <p>For the resolution methods ({@code getValue}, {@code getType}, {@code setValue},
 * {@code isReadOnly} and {@code invoke}) the ELContext's {@code propertyResolved} flag is first
 * reset to {@code false} and the component resolvers are then consulted in registration order;
 * iteration stops at the first resolver that sets the flag to {@code true}, whose result is
 * returned. If no resolver claims the (base, property) pair, the flag stays {@code false} and the
 * caller must ignore the returned value.
 *
 * <p>The design-time methods {@link #getFeatureDescriptors(ELContext, Object)} and
 * {@link #getCommonPropertyType(ELContext, Object)} ignore the flag and instead combine the
 * answers of all component resolvers.
 */
public class CompositeELResolver extends ELResolver {
    // Component resolvers, consulted in the order they were registered via add().
    private final List<ELResolver> resolvers = new ArrayList<>();

    /**
     * Registers a component resolver. Resolvers are consulted in registration order.
     *
     * @param elResolver the component resolver to append.
     * @throws NullPointerException if the resolver is null.
     */
    public void add(ELResolver elResolver) {
        if (elResolver == null) {
            throw new NullPointerException("resolver must not be null");
        }
        resolvers.add(elResolver);
    }

    /**
     * Computes the most general property type accepted for the given base by combining the answers
     * of all component resolvers: the result is the most specific common supertype of the non-null
     * types they report, widening to {@code Object.class} when two answers are unrelated.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @return the combined type, or null if no component resolver handles the base.
     */
    @Override
    public Class<?> getCommonPropertyType(ELContext context, Object base) {
        Class<?> common = null;
        for (ELResolver resolver : resolvers) {
            Class<?> candidate = resolver.getCommonPropertyType(context, base);
            if (candidate == null) {
                continue; // this resolver has no opinion; skip it
            }
            if (common == null || candidate.isAssignableFrom(common)) {
                common = candidate; // first answer, or a broader supertype of what we had
            } else if (!common.isAssignableFrom(candidate)) {
                common = Object.class; // unrelated types - only Object covers both
            }
        }
        return common;
    }

    /**
     * Lazily concatenates the feature descriptors of all component resolvers. The
     * {@code propertyResolved} flag is not consulted; a resolver returning null contributes
     * nothing.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @return an iterator over zero or more FeatureDescriptor objects.
     */
    @Override
    public Iterator<FeatureDescriptor> getFeatureDescriptors(final ELContext context, final Object base) {
        return new Iterator<FeatureDescriptor>() {
            final Iterator<FeatureDescriptor> none = Collections.<FeatureDescriptor> emptyList().iterator();
            final Iterator<ELResolver> pending = CompositeELResolver.this.resolvers.iterator();
            Iterator<FeatureDescriptor> current = none;

            // Advance to the next component iterator that still has elements; null results count as empty.
            Iterator<FeatureDescriptor> current() {
                while (!current.hasNext() && pending.hasNext()) {
                    current = pending.next().getFeatureDescriptors(context, base);
                    if (current == null) {
                        current = none;
                    }
                }
                return current;
            }

            public boolean hasNext() {
                return current().hasNext();
            }

            public FeatureDescriptor next() {
                return current().next();
            }

            public void remove() {
                current().remove();
            }
        };
    }

    /**
     * Resolves the acceptable type for a future {@code setValue} call by consulting component
     * resolvers in order until one sets {@code propertyResolved}.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @param property the property or variable to type.
     * @return the resolved type if {@code propertyResolved} became true; otherwise null (undefined).
     * @throws NullPointerException if context is null.
     * @throws PropertyNotFoundException if base is not null and the property does not exist or is not readable.
     * @throws ELException if a component resolver fails; its exception propagates to the caller.
     */
    @Override
    public Class<?> getType(ELContext context, Object base, Object property) {
        context.setPropertyResolved(false);
        for (ELResolver resolver : resolvers) {
            Class<?> type = resolver.getType(context, base, property);
            if (context.isPropertyResolved()) {
                return type;
            }
        }
        return null;
    }

    /**
     * Resolves the value of the given property by consulting component resolvers in order until one
     * sets {@code propertyResolved}.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @param property the property or variable to resolve.
     * @return the resolved value if {@code propertyResolved} became true; otherwise null (undefined).
     * @throws NullPointerException if context is null.
     * @throws PropertyNotFoundException if base is not null and the property does not exist or is not readable.
     * @throws ELException if a component resolver fails; its exception propagates to the caller.
     */
    @Override
    public Object getValue(ELContext context, Object base, Object property) {
        context.setPropertyResolved(false);
        for (ELResolver resolver : resolvers) {
            Object value = resolver.getValue(context, base, property);
            if (context.isPropertyResolved()) {
                return value;
            }
        }
        return null;
    }

    /**
     * Determines whether {@code setValue} would always fail for the given pair by consulting
     * component resolvers in order until one sets {@code propertyResolved}.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @param property the property or variable to query.
     * @return the read-only answer if {@code propertyResolved} became true; otherwise false (undefined).
     * @throws NullPointerException if context is null.
     * @throws PropertyNotFoundException if base is not null and the property does not exist or is not readable.
     * @throws ELException if a component resolver fails; its exception propagates to the caller.
     */
    @Override
    public boolean isReadOnly(ELContext context, Object base, Object property) {
        context.setPropertyResolved(false);
        for (ELResolver resolver : resolvers) {
            boolean readOnly = resolver.isReadOnly(context, base, property);
            if (context.isPropertyResolved()) {
                return readOnly;
            }
        }
        return false;
    }

    /**
     * Sets the value of the given property by consulting component resolvers in order; iteration
     * stops as soon as one of them sets {@code propertyResolved}. If none does, the flag stays
     * false and the caller may assume no value was set.
     *
     * @param context the context of this evaluation.
     * @param base the base object, or null for top-level variables.
     * @param property the property or variable to set.
     * @param value the value to assign.
     * @throws NullPointerException if context is null.
     * @throws PropertyNotFoundException if base is not null and the property does not exist or is not readable.
     * @throws PropertyNotWritableException if the pair is handled but the property is not writable.
     * @throws ELException if a component resolver fails; its exception propagates to the caller.
     */
    @Override
    public void setValue(ELContext context, Object base, Object property, Object value) {
        context.setPropertyResolved(false);
        for (ELResolver resolver : resolvers) {
            resolver.setValue(context, base, property, value);
            if (context.isPropertyResolved()) {
                return;
            }
        }
    }

    /**
     * Invokes the named method on the base object by consulting component resolvers in order until
     * one sets {@code propertyResolved}; that resolver's result is returned. If none handles the
     * pair, null is returned and the flag stays false.
     *
     * @param context the context of this evaluation.
     * @param base the bean on which to invoke the method.
     * @param method the simple method name; coerced to String. "&lt;init&gt;" or "&lt;clinit&gt;"
     *            raises a NoSuchMethodException.
     * @param paramTypes the formal parameter types in declared order; may be null if unknown.
     * @param params the arguments, or null for none.
     * @return the invocation result (null for void methods), or null if unresolved.
     * @since 2.2
     */
    @Override
    public Object invoke(ELContext context, Object base, Object method, Class<?>[] paramTypes, Object[] params) {
        context.setPropertyResolved(false);
        for (ELResolver resolver : resolvers) {
            Object result = resolver.invoke(context, base, method, paramTypes, params);
            if (context.isPropertyResolved()) {
                return result;
            }
        }
        return null;
    }
}
package org.drools.chance.rule.builder;

import org.drools.chance.factmodel.Imperfect;
import org.drools.chance.common.ImperfectField;
import org.drools.chance.core.util.IntHashMap;
import org.drools.chance.reteoo.nodes.ChanceObjectTypeNode;
import org.drools.compiler.DescrBuildError;
import org.drools.lang.MVELDumper;
import org.drools.lang.descr.*;
import org.drools.rule.Declaration;
import org.drools.rule.Pattern;
import org.drools.rule.builder.PatternBuilder;
import org.drools.rule.builder.RuleBuildContext;
import org.drools.spi.InternalReadAccessor;

import java.util.ArrayList;
import java.util.List;

/**
 * Pattern builder that extends the stock Drools {@link PatternBuilder} so that
 * "imperfect" (annotated with {@code @Imperfect}, using an imperfect operator, or
 * reading an {@link ImperfectField}) constraints are detected and rewritten before
 * the standard build pipeline runs.
 */
public class ChanceRulePatternBuilder extends PatternBuilder {

    public ChanceRulePatternBuilder() {
        super();
    }

    /**
     * Pre-processes the pattern's constraint descriptors: parses each one, decides
     * whether it is imperfect, and rewrites the pattern's live constraint list
     * in place. Imperfect connective constraints are replaced by their flattened
     * expansion (see {@link #expand}); crisp ones are replaced by their parsed
     * form. Two parallel index-keyed maps ({@code impFlags}/{@code posFlags})
     * record, per final list position, whether the entry is imperfect/positional,
     * and drive the dispatch in the second loop.
     *
     * NOTE(review): correctness depends on {@code constraints} being the SAME live
     * list returned by {@code patternDescr.getConstraint().getDescrs()} (mutations
     * must be visible through {@code patternDescr.getDescrs()} in the second loop)
     * and on {@code indexOf} matching by object identity/equality — confirm against
     * the descr classes' equals() semantics.
     */
    protected void processConstraintsAndBinds( final RuleBuildContext context,
                                               final PatternDescr patternDescr,
                                               final Pattern pattern ) {
        MVELDumper.MVELDumperContext mvelCtx = new MVELDumper.MVELDumperContext().setRuleContext(context);
        // Live list: mutated in place below. 'temp' is a snapshot so iteration is
        // safe while 'constraints' is edited.
        List constraints = patternDescr.getConstraint().getDescrs();
        List<? extends BaseDescr> temp = new ArrayList<BaseDescr>( patternDescr.getConstraint().getDescrs() );
        List<ConstraintConnectiveDescr> rootConstraints = new ArrayList<ConstraintConnectiveDescr>();
        // Per-index flags for the rewritten constraint list: imperfect? positional?
        IntHashMap<Boolean> impFlags = new IntHashMap<Boolean>();
        IntHashMap<Boolean> posFlags = new IntHashMap<Boolean>();
        // Pattern-level @Imperfect forces the imperfect post-processing step.
        boolean hasImperfectConstraint = ( patternDescr.getAnnotation( Imperfect.class.getSimpleName() ) != null );

        for ( BaseDescr b : temp ) {
            String expression;
            boolean isPositional = false;
            // Recover a parseable textual expression from the descriptor.
            if ( b instanceof BindingDescr ) {
                BindingDescr bind = (BindingDescr) b;
                expression = bind.getVariable() + (bind.isUnification() ? " := " : " : ") + bind.getExpression();
            } else if ( b instanceof ExprConstraintDescr ) {
                ExprConstraintDescr descr = (ExprConstraintDescr) b;
                expression = descr.getExpression();
                isPositional = descr.getType() == ExprConstraintDescr.Type.POSITIONAL;
            } else {
                expression = b.getText();
            }
            ConstraintConnectiveDescr result = parseExpression( context, patternDescr, b, expression );
            // Imperfect if the whole object type is imperfect, or if the parsed
            // tree contains an imperfect operator/field/annotation.
            boolean isImperfect = ChanceObjectTypeNode.isImperfect(pattern.getObjectType()) || analyzeConstraintConnective( result, context, pattern );
            if ( isImperfect ) {
                hasImperfectConstraint = true;
                if ( ! isPositional && result != null ) {
                    rootConstraints.add( result );
                    // Replace the original descr with its flattened expansion,
                    // flagging every inserted entry as imperfect.
                    int index = constraints.indexOf( b );
                    int k = index;
                    impFlags.put( k, true );
                    posFlags.put( k, false );
                    constraints.remove( index );
                    for ( BaseDescr sub : expand( result ) ) {
                        constraints.add( index, sub );
                        ++k;
                        impFlags.put( k, true );
                        posFlags.put( k, false );
                    }
                }
            } else {
                // Crisp constraint: swap in the parsed form (non-positional, or a
                // positional that is really a binding), flag as not imperfect.
                int index = constraints.indexOf( b );
                impFlags.put( index, false );
                rootConstraints.add( result );
                if (! isPositional) {
                    constraints.remove( index );
                    constraints.add( index, result );
                } else {
                    if ( result.getDescrs().get( 0 ) instanceof BindingDescr ) {
                        constraints.remove( index );
                        constraints.add( index, result );
                    }
                }
                if ( ! isPositional ) {
                    posFlags.put( index, false );
                } else {
                    posFlags.put( index, true );
                }
            }
        }

        // Add the pattern-level "and" that groups all root constraints when any
        // imperfection was detected.
        if ( hasImperfectConstraint ) {
            ConstraintConnectiveDescr root = new ConstraintConnectiveDescr( ConnectiveType.AND );
            // mock "isA" to increase cardinality by 1
            root.addDescr( new RelationalExprDescr( "isA", false, null, new AtomicExprDescr( "this" ), new AtomicExprDescr( patternDescr.getObjectType() ) ) );
            for ( BaseDescr rootChild : rootConstraints ) {
                root.addDescr( rootChild );
            }
            if ( patternDescr.getAnnotation( Imperfect.class.getSimpleName() ) != null ) {
                root.addAnnotation( patternDescr.getAnnotation( Imperfect.class.getSimpleName() ) );
            }
            constraints.add( root );
            impFlags.put( constraints.size() - 1, true );
            posFlags.put( constraints.size() - 1, false );
        }

        // Second pass over the (now rewritten) descriptor list: dispatch each
        // entry to the appropriate build path using the flags recorded above.
        int index = 0;
        for ( BaseDescr b : patternDescr.getDescrs() ) {
            boolean isPositional = posFlags.get( index );
            if ( b instanceof BindingDescr ) {
                // it is just a bind, so build it
                buildRuleBindings( context, patternDescr, pattern, (BindingDescr) b );
            } else if ( b instanceof ConstraintConnectiveDescr ) {
                if ( impFlags.get( index ) == true ) {
                    // imperfect connective: use this class's operator-constraint build
                    build(context, patternDescr, pattern, (ConstraintConnectiveDescr) b);
                } else {
                    ConstraintConnectiveDescr result = (ConstraintConnectiveDescr) b;
                    if ( result.getDescrs().size() == 1 && result.getDescrs().get( 0 ) instanceof BindingDescr ) {
                        // it is just a bind, so build it
                        buildRuleBindings( context, patternDescr, pattern, (BindingDescr) result.getDescrs().get( 0 ) );
                    } else {
                        super.build(context, patternDescr, pattern, (ConstraintConnectiveDescr) b, mvelCtx);
                    }
                }
            } else if ( isPositional ) {
                processPositional(context, patternDescr, pattern, (ExprConstraintDescr) b);
            } else {
                // need to build the actual constraint
                buildCcdDescr( context, patternDescr, pattern, b, new ConstraintConnectiveDescr( ), mvelCtx);
            }
            index++;
        }

        // combineConstraints(context, pattern);
    }

    /**
     * Handles a re-declared binding variable: a unification ({@code :=}) is turned
     * into an equality constraint and built; a plain duplicate declaration is
     * reported as a build error.
     */
    protected void processDuplicateBindings( boolean isUnification,
                                             PatternDescr patternDescr,
                                             Pattern pattern,
                                             BaseDescr original,
                                             String leftExpression,
                                             String rightIdentifier,
                                             RuleBuildContext context ) {
        MVELDumper.MVELDumperContext mvelCtx = new MVELDumper.MVELDumperContext().setRuleContext(context);
        if ( isUnification ) {
            // Unification: rewrite "x := expr" as the constraint "expr == x".
            String expr = leftExpression + " == " + rightIdentifier;
            ConstraintConnectiveDescr result = parseExpression( context, patternDescr, patternDescr, expr );
            BaseDescr constr = result.getDescrs().get( 0 );
            buildCcdDescr( context, patternDescr, pattern, constr, result, mvelCtx);
        } else {
            // This declaration already exists, so throw an Exception
            context.addError(new DescrBuildError(context.getParentDescr(), patternDescr, null,
                    "Duplicate declaration for variable '" + leftExpression + "' in the rule '" + context.getRule().getName() + "'"));
        }
    }

    /**
     * Recursively decides whether a constraint descriptor tree is "imperfect":
     * true if any node carries an {@code @Imperfect} annotation, uses an operator
     * recognized by {@code ChanceOperators}, or reads a field/declaration whose
     * extracted type is assignable from {@link ImperfectField}. Literals and
     * bindings are always crisp. Unknown descriptor types are rejected.
     */
    private boolean analyzeConstraintConnective( BaseDescr descr, RuleBuildContext context, Pattern pattern ) {
        if ( descr instanceof ConstraintConnectiveDescr ) {
            ConstraintConnectiveDescr ccd = (ConstraintConnectiveDescr) descr;
            if ( ( (ConstraintConnectiveDescr) descr ).getAnnotation( Imperfect.class.getSimpleName() ) != null ) {
                return true;
            }
            // A connective is imperfect if any of its children is.
            for ( BaseDescr child : ccd.getDescrs() ) {
                if ( analyzeConstraintConnective( child, context, pattern ) ) {
                    return true;
                }
            }
        } else if ( descr instanceof RelationalExprDescr ) {
            RelationalExprDescr rel = (RelationalExprDescr) descr;
            if ( ChanceOperators.isImperfect( rel.getOperator() ) ) {
                return true;
            }
            String left = rel.getLeft() instanceof BindingDescr ?
                    ( (BindingDescr) rel.getLeft() ).getExpression() : rel.getLeft().toString();
            InternalReadAccessor extractor = getFieldReadAccessor( context, rel, pattern.getObjectType(), left, null, false );
            if ( extractor != null ) {
                // A resolvable left-hand field decides the answer outright:
                // imperfect iff its extracted type is an ImperfectField.
                if ( extractor.getExtractToClass().isAssignableFrom( ImperfectField.class ) ) {
                    return true;
                } else {
                    return false;
                }
            }
            // No direct accessor: recurse into both operands.
            if ( analyzeConstraintConnective( rel.getLeft(), context, pattern ) ) {
                return true;
            }
            if ( analyzeConstraintConnective( rel.getRight(), context, pattern ) ) {
                return true;
            }
        } else if ( descr instanceof AtomicExprDescr ) {
            AtomicExprDescr atom = (AtomicExprDescr) descr;
            if ( atom.isLiteral() ) {
                return false;
            }
            // TODO ?
            if ( pattern.getInnerDeclarations().get( atom.getExpression() ) != null ) {
                Declaration ref = pattern.getInnerDeclarations().get( atom.getExpression() );
                return ref.getExtractor().getExtractToClass().isAssignableFrom( ImperfectField.class );
            }
            return false;
        } else if ( descr instanceof BindingDescr ) {
            return false;
        } else {
            throw new UnsupportedOperationException( "Can't analyze " + descr.getClass() + "for imperfection" );
        }
        return false;
    }

    /**
     * Builds an imperfect connective constraint via the Chance-specific MVEL
     * constraint builder and attaches it to the pattern.
     */
    protected void build( RuleBuildContext context,
                          PatternDescr patternDescr,
                          Pattern pattern,
                          ConstraintConnectiveDescr descr ) {
        pattern.addConstraint( ((ChanceMVELConstraintBuilder) getConstraintBuilder( context )).buildOperatorConstraint( context, patternDescr, pattern, descr ) );
    }

    /**
     * Flattens a connective descriptor tree into a single list. Children are
     * prepended (and nested connectives expanded recursively), and finally the
     * connective itself is prepended, so the connective ends up FIRST, followed
     * by its descendants in reverse order of visit.
     */
    private List<BaseDescr> expand( ConstraintConnectiveDescr d ) {
        int N = d.getDescrs().size();
        List<BaseDescr> ret = new ArrayList<BaseDescr>();
        for ( int j = 0; j < N; j++ ) {
            BaseDescr child = d.getDescrs().get(j);
            if ( child instanceof ConstraintConnectiveDescr ) {
                ret.addAll( 0, expand( (ConstraintConnectiveDescr) child) );
            } else {
                ret.add( 0, child );
            }
        }
        ret.add( 0, d );
        return ret;
    }

    /**
     * Adds a relational constraint to the pattern. When the operator is crisp
     * (not imperfect) but either operand is an {@link ImperfectField}, the
     * operand expressions are rewritten to call {@code .getCrisp()} so the
     * comparison operates on the defuzzified value, and the extractor is
     * re-resolved against the rewritten left expression. Returns false when no
     * read accessor can be created for the left operand.
     */
    protected boolean addConstraintToPattern( final RuleBuildContext context,
                                              final Pattern pattern,
                                              final RelationalExprDescr relDescr,
                                              String expr,
                                              String value1,
                                              String value2,
                                              boolean isConstant) {
        InternalReadAccessor extractor = getFieldReadAccessor( context, relDescr, pattern.getObjectType(), value1, null, false );
        if ( extractor == null ) {
            return false; // impossible to create extractor
        }

        boolean isOperatorImperfect = ChanceOperators.isImperfect( relDescr.getOperatorDescr().getOperator() );

        if ( ! isOperatorImperfect ) {
            // operator works on crisp values. Any imperfect-field expression must be narrowed down to its crisp, certain value
            BaseDescr leftDescr = relDescr.getLeft();
            BaseDescr rightDescr = relDescr.getRight();
            boolean isLeftImperfect = ImperfectField.class.isAssignableFrom( extractor.getExtractToClass() );
            boolean isRightImperfect = false;
            if ( rightDescr instanceof AtomicExprDescr ) {
                // Right side may reference a declaration bound to an imperfect field.
                AtomicExprDescr right = ((AtomicExprDescr) relDescr.getRight());
                String potentialVar = right.getExpression();
                Declaration decl = context.getDeclarationResolver().getDeclaration(context.getRule(), potentialVar);
                if ( decl != null && decl.getExtractor() != null && ImperfectField.class.isAssignableFrom( decl.getExtractor().getExtractToClass() ) ) {
                    isRightImperfect = true;
                }
            }
            if ( leftDescr instanceof AtomicExprDescr ) {
                AtomicExprDescr left = ((AtomicExprDescr) leftDescr );
                if ( isLeftImperfect ) {
                    // Narrow the imperfect field to its crisp value.
                    value1 = left.getExpression() + ".getCrisp()";
                    left.setExpression( value1 );
                }
                expr = ( (AtomicExprDescr) leftDescr ).getExpression();
            } else if ( leftDescr instanceof BindingDescr ) {
                BindingDescr left = ((BindingDescr) leftDescr);
                if ( isLeftImperfect ) {
                    value1 = left.getExpression() + ".getCrisp()";
                    left.setExpression( value1 );
                }
                expr = ((BindingDescr) leftDescr).getExpression();
            }
            // Rebuild the textual expression "left op right".
            expr += " " + relDescr.getOperator() + " ";
            if ( rightDescr instanceof AtomicExprDescr ) {
                AtomicExprDescr right = ((AtomicExprDescr) rightDescr );
                if ( isRightImperfect ) {
                    right.setExpression( right.getExpression() + ".getCrisp()" );
                }
                expr += right.getExpression();
            } else {
                throw new UnsupportedOperationException( "ChanceRulePatternBuilder can't process right expressions of this type yet " + rightDescr );
            }
            // Re-resolve the extractor since value1 may now include .getCrisp().
            extractor = getFieldReadAccessor( context, relDescr, pattern.getObjectType(), value1, null, false );
            if ( extractor == null ) {
                return false; // impossible to create extractor
            }
        }

        return super.addConstraintToPattern( context, pattern, relDescr, expr, value1, value2, isConstant, extractor );
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.apex.malhar.contrib.parser;

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * <p>
 * Schema that defines log fields and the regex that captures each of them.
 * Operators use this information to validate and parse incoming tuples.
 * Information from the JSON schema is saved in this object and used by the
 * operators.
 * </p>
 * <br>
 * Example schema:<br>
 * {@code {"fields": [{"field": "host","regex": "^([0-9.]+)"},
 * {"field": "userName","regex": "(.*?)"},
 * {"field": "request","regex": "\"((?:[^\"]|\")+)\""},
 * {"field": "statusCode","regex": "(\\d{3})"},
 * {"field": "bytes","regex": "(\\d+|-)"}]}}
 *
 * @since 3.7.0
 */
public class LogSchemaDetails
{
  private static final Logger logger = LoggerFactory.getLogger(LogSchemaDetails.class);

  /**
   * This holds the list of field names in the same order as in the schema.
   */
  private final List<String> fieldNames = new LinkedList<>();
  private final List<Field> fields = new LinkedList<>();
  /**
   * Pattern compiled once in the constructor; null only if construction failed.
   */
  private Pattern compiledPattern;
  /**
   * This holds the composed regex pattern for the whole schema.
   */
  private String pattern;

  /**
   * Parses the JSON schema, builds the per-field list, composes the combined
   * regex and compiles it.
   *
   * @param json JSON schema string (see class javadoc for the expected shape)
   * @throws IllegalArgumentException if the JSON cannot be parsed; the original
   *         exception is preserved as the cause
   */
  public LogSchemaDetails(String json)
  {
    try {
      initialize(json);
      createPattern();
      this.compiledPattern = Pattern.compile(this.pattern);
    } catch (JSONException | IOException e) {
      // Log with the idiomatic (message, throwable) form so the stack trace is kept.
      logger.error("Failed to initialize log schema", e);
      throw new IllegalArgumentException(e);
    }
  }

  /**
   * For a given json string, this method sets the field members.
   *
   * @param json JSON schema string
   * @throws JSONException if a required key ("fields", "field", "regex") is missing or malformed
   * @throws IOException declared for callers; kept for backward compatibility with the
   *         constructor's catch clause
   */
  private void initialize(String json) throws JSONException, IOException
  {
    JSONObject jsonObject = new JSONObject(json);
    JSONArray fieldArray = jsonObject.getJSONArray("fields");
    for (int i = 0; i < fieldArray.length(); i++) {
      JSONObject obj = fieldArray.getJSONObject(i);
      Field field = new Field(obj.getString("field"), obj.getString("regex"));
      this.fields.add(field);
      this.fieldNames.add(field.name);
    }
  }

  /**
   * Creates the combined regex group pattern by joining each field's regex with
   * a single space (fields are assumed to be space-separated in the log line).
   */
  public void createPattern()
  {
    // StringBuilder: no synchronization needed for this local accumulation.
    StringBuilder pattern = new StringBuilder();
    for (Field field : this.getFields()) {
      pattern.append(field.getRegex()).append(" ");
    }
    logger.info("Created pattern for parsing the log {}", pattern.toString().trim());
    this.setPattern(pattern.toString().trim());
  }

  /**
   * Creates a JSON object by matching the log line against the compiled pattern.
   * Capture group i is assigned to the i-th field name; returns null when the
   * pattern was never compiled or does not match.
   *
   * @param log raw log line
   * @return logObject, or null if there is no match
   * @throws JSONException if a value cannot be stored in the JSON object
   */
  public JSONObject createJsonFromLog(String log) throws JSONException
  {
    JSONObject logObject = null;
    if (this.compiledPattern != null) {
      Matcher m = this.compiledPattern.matcher(log);
      int count = m.groupCount();
      if (m.find()) {
        int i = 1;
        logObject = new JSONObject();
        for (String field : this.getFieldNames()) {
          if (i > count) {
            // More schema fields than capture groups: stop rather than overrun.
            break;
          }
          logObject.put(field, m.group(i));
          i++;
        }
      }
    }
    return logObject;
  }

  /**
   * Get the list of fieldNames mentioned in schema.
   *
   * @return fieldNames
   */
  public List<String> getFieldNames()
  {
    return fieldNames;
  }

  /**
   * Get the list of fields (field, regex) mentioned in schema.
   *
   * @return fields
   */
  public List<Field> getFields()
  {
    return fields;
  }

  /**
   * Get the regex pattern for the schema.
   *
   * @return pattern
   */
  public String getPattern()
  {
    return pattern;
  }

  /**
   * Set the regex pattern for schema.
   * NOTE(review): this does not recompile {@code compiledPattern}; callers that
   * change the pattern after construction will still match with the old regex.
   *
   * @param pattern combined regex
   */
  public void setPattern(String pattern)
  {
    this.pattern = pattern;
  }

  /**
   * One (name, regex) pair from the schema.
   * NOTE(review): left as a non-static inner class to preserve the public
   * construction syntax for existing callers; a static nested class would be
   * preferable if no external code constructs it.
   */
  public class Field
  {
    /**
     * name of the field
     */
    private String name;
    /**
     * regular expression for the field
     */
    private String regex;

    public Field(String name, String regex)
    {
      this.name = name;
      this.regex = regex;
    }

    /**
     * Get the name of the field.
     *
     * @return name
     */
    public String getName()
    {
      return name;
    }

    /**
     * Set the name of the field.
     *
     * @param name field name
     */
    public void setName(String name)
    {
      this.name = name;
    }

    /**
     * Get the regular expression of the field.
     *
     * @return regex
     */
    public String getRegex()
    {
      return regex;
    }

    /**
     * Set the regular expression of the field.
     *
     * @param regex field regex
     */
    public void setRegex(String regex)
    {
      this.regex = regex;
    }

    @Override
    public String toString()
    {
      return "Fields [name=" + name + ", regex=" + regex + "]";
    }
  }
}
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.dataservices.core.odata;

import org.apache.olingo.commons.api.data.Entity;
import org.apache.olingo.commons.api.data.EntityCollection;
import org.apache.olingo.commons.api.edm.EdmBindingTarget;
import org.apache.olingo.commons.api.edm.EdmEntitySet;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.server.api.ODataApplicationException;
import org.apache.olingo.server.api.uri.queryoption.CountOption;
import org.apache.olingo.server.api.uri.queryoption.FilterOption;
import org.apache.olingo.server.api.uri.queryoption.OrderByItem;
import org.apache.olingo.server.api.uri.queryoption.OrderByOption;
import org.apache.olingo.server.api.uri.queryoption.SkipOption;
import org.apache.olingo.server.api.uri.queryoption.SkipTokenOption;
import org.apache.olingo.server.api.uri.queryoption.TopOption;
import org.apache.olingo.server.api.uri.queryoption.expression.ExpressionVisitException;
import org.wso2.carbon.dataservices.core.odata.expression.ExpressionVisitorImpl;
import org.wso2.carbon.dataservices.core.odata.expression.ODataConstants;
import org.wso2.carbon.dataservices.core.odata.expression.operand.TypedOperand;
import org.wso2.carbon.dataservices.core.odata.expression.operand.VisitorOperand;

import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Locale;

/**
 * Applies OData system query options ($count, $filter, $top, $skip, $skiptoken,
 * $orderby) to an in-memory {@link EntityCollection}. All methods mutate the
 * given collection in place.
 */
public class QueryHandler {

    /** Default server-side page size used when the client expresses no preference. */
    private static final int MAX_PAGE_SIZE = 10;

    /**
     * This method applies count query option to the given entity collection.
     *
     * @param countOption Count option
     * @param entitySet   Entity collection
     */
    public static void applyCountSystemQueryOption(final CountOption countOption, final EntityCollection entitySet) {
        if (countOption.getValue()) {
            entitySet.setCount(entitySet.getEntities().size());
        }
    }

    /**
     * This method applies filter query option to the given entity collection,
     * removing every entity for which the filter expression evaluates to false.
     *
     * @param filterOption Filter option
     * @param entitySet    Entity collection
     * @param edmEntitySet Entity set
     * @throws ODataApplicationException if the expression is not boolean-valued or evaluation fails
     */
    public static void applyFilterSystemQuery(final FilterOption filterOption, final EntityCollection entitySet,
                                              final EdmBindingTarget edmEntitySet) throws ODataApplicationException {
        try {
            final Iterator<Entity> iter = entitySet.getEntities().iterator();
            while (iter.hasNext()) {
                final VisitorOperand operand = filterOption.getExpression()
                        .accept(new ExpressionVisitorImpl(iter.next(), edmEntitySet));
                final TypedOperand typedOperand = operand.asTypedOperand();
                if (typedOperand.is(ODataConstants.primitiveBoolean)) {
                    if (Boolean.FALSE.equals(typedOperand.getTypedValue(Boolean.class))) {
                        // Iterator.remove: safe removal while iterating.
                        iter.remove();
                    }
                } else {
                    throw new ODataApplicationException(
                            "Invalid filter expression. Filter expressions must return a value of " +
                            "type Edm.Boolean", HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
                }
            }
        } catch (ExpressionVisitException e) {
            // Fix: preserve the original exception as the cause (matches the
            // four-arg constructor already used in createNextLink below).
            throw new ODataApplicationException("Exception in filter evaluation",
                    HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(), Locale.ROOT, e);
        }
    }

    /**
     * This method applies top query option to the given entity collection.
     *
     * @param topOption Top option
     * @param entitySet Entity Collection
     * @throws ODataApplicationException if the top value is negative
     */
    public static void applyTopSystemQueryOption(final TopOption topOption, final EntityCollection entitySet)
            throws ODataApplicationException {
        // NOTE(review): zero is accepted here (collection is emptied) although
        // the error message says "positive" — confirm intended wording.
        if (topOption.getValue() >= 0) {
            reduceToSize(entitySet, topOption.getValue());
        } else {
            throw new ODataApplicationException("Top value must be positive",
                    HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
        }
    }

    /**
     * This method removes entities from the end of the collection until it is at
     * most {@code limit} long.
     *
     * @param entitySet Entity collection
     * @param limit     Limit size
     */
    private static void reduceToSize(final EntityCollection entitySet, final int limit) {
        while (entitySet.getEntities().size() > limit) {
            entitySet.getEntities().remove(entitySet.getEntities().size() - 1);
        }
    }

    /**
     * This method applies skip query option to the given entity collection.
     *
     * @param skipOption Skip option
     * @param entitySet  Entity collection
     * @throws ODataApplicationException if the skip value is negative
     */
    public static void applySkipSystemQueryHandler(final SkipOption skipOption, final EntityCollection entitySet)
            throws ODataApplicationException {
        if (skipOption.getValue() >= 0) {
            popAtMost(entitySet, skipOption.getValue());
        } else {
            throw new ODataApplicationException("Skip value must be positive",
                    HttpStatusCode.BAD_REQUEST.getStatusCode(), Locale.ROOT);
        }
    }

    /** Removes up to {@code n} entities from the front of the collection. */
    private static void popAtMost(final EntityCollection entitySet, final int n) {
        final Iterator<Entity> iter = entitySet.getEntities().iterator();
        int i = 0;
        while (iter.hasNext() && i < n) {
            iter.next();
            iter.remove();
            i++;
        }
    }

    /**
     * This method applies server-side paging to the given entity collection.
     *
     * @param skipTokenOption   Current skip token option (from a previous response's next link)
     * @param entityCollection  Entity collection
     * @param edmEntitySet      EDM entity set to decide whether paging must be done
     * @param rawRequestUri     Request URI (used to construct the next link)
     * @param preferredPageSize Preference for page size
     * @return Chosen page size, or null when no entity set is given
     * @throws ODataApplicationException if the requested page is beyond the collection
     */
    public static Integer applyServerSidePaging(final SkipTokenOption skipTokenOption,
                                                EntityCollection entityCollection, final EdmEntitySet edmEntitySet,
                                                final String rawRequestUri, final Integer preferredPageSize)
            throws ODataApplicationException {
        if (edmEntitySet != null) {
            final int pageSize = getPageSize(preferredPageSize);
            final int page = getPage(skipTokenOption);
            final int itemsToSkip = pageSize * page;
            // NOTE(review): itemsToSkip == size yields an empty final page rather
            // than a 404 — confirm this boundary is intentional.
            if (itemsToSkip <= entityCollection.getEntities().size()) {
                popAtMost(entityCollection, itemsToSkip);
                final int remainingItems = entityCollection.getEntities().size();
                reduceToSize(entityCollection, pageSize);
                // Determine if a new next Link has to be provided.
                if (remainingItems > pageSize) {
                    entityCollection.setNext(createNextLink(rawRequestUri, edmEntitySet, page + 1));
                }
            } else {
                throw new ODataApplicationException("Nothing found.", HttpStatusCode.NOT_FOUND.getStatusCode(),
                        Locale.ROOT);
            }
            return pageSize;
        }
        return null;
    }

    /**
     * This method creates the next url link.
     *
     * @param rawRequestUri Request uri
     * @param entitySet     EntitySet
     * @param page          Page num
     * @return uri
     * @throws ODataApplicationException if the composed link is not a valid URI
     */
    private static URI createNextLink(final String rawRequestUri, final EdmEntitySet entitySet, final int page)
            throws ODataApplicationException {
        String nextLink = rawRequestUri + "/" + entitySet.getName() + "?$skiptoken=" + page;
        try {
            return new URI(nextLink);
        } catch (final URISyntaxException e) {
            throw new ODataApplicationException("Exception while constructing next link",
                    HttpStatusCode.INTERNAL_SERVER_ERROR.getStatusCode(), Locale.ROOT, e);
        }
    }

    /**
     * This method returns the page size.
     *
     * @param preferredPageSize Preferred page size
     * @return page size
     */
    private static int getPageSize(final Integer preferredPageSize) {
        return preferredPageSize == null ? MAX_PAGE_SIZE : preferredPageSize;
    }

    /**
     * This method returns the page number parsed from the skip token.
     *
     * @param skipTokenOption Skip token option
     * @return page
     * @throws ODataApplicationException if the token is not an integer
     */
    private static int getPage(final SkipTokenOption skipTokenOption) throws ODataApplicationException {
        final String value = skipTokenOption.getValue();
        try {
            return Integer.parseInt(value);
        } catch (final NumberFormatException e) {
            throw new ODataApplicationException("Invalid skip token", HttpStatusCode.BAD_REQUEST.getStatusCode(),
                    Locale.ROOT, e);
        }
    }

    /**
     * This method applies order by option query to the given entity collection.
     *
     * @param orderByOption    Order by option
     * @param entitySet        Entity Set
     * @param edmBindingTarget Binding Target
     */
    public static void applyOrderByOption(final OrderByOption orderByOption, final EntityCollection entitySet,
                                          final EdmBindingTarget edmBindingTarget) {
        Collections.sort(entitySet.getEntities(), new Comparator<Entity>() {
            @Override
            @SuppressWarnings({ "unchecked", "rawtypes" })
            public int compare(final Entity e1, final Entity e2) {
                // Evaluate the first order option for both entity
                // If and only if the result of the previous order option is equals to 0
                // evaluate the next order option until all options are evaluated or they are not equals
                int result = 0;
                for (int i = 0; i < orderByOption.getOrders().size() && result == 0; i++) {
                    try {
                        final OrderByItem item = orderByOption.getOrders().get(i);
                        final TypedOperand op1 = item.getExpression()
                                .accept(new ExpressionVisitorImpl(e1, edmBindingTarget)).asTypedOperand();
                        final TypedOperand op2 = item.getExpression()
                                .accept(new ExpressionVisitorImpl(e2, edmBindingTarget)).asTypedOperand();
                        if (op1.isNull() || op2.isNull()) {
                            if (op1.isNull() && op2.isNull()) {
                                result = 0; // null is equals to null
                            } else {
                                result = op1.isNull() ? -1 : 1;
                            }
                        } else {
                            Object o1 = op1.getValue();
                            Object o2 = op2.getValue();
                            if (o1.getClass() == o2.getClass() && o1 instanceof Comparable) {
                                result = ((Comparable) o1).compareTo(o2);
                            } else {
                                // Incomparable types are treated as equal.
                                result = 0;
                            }
                        }
                        result = item.isDescending() ? result * -1 : result;
                    } catch (ExpressionVisitException | ODataApplicationException e) {
                        // Comparator cannot throw checked exceptions; wrap.
                        throw new RuntimeException(e);
                    }
                }
                return result;
            }
        });
    }
}
/* * Copyright 2016 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.schemaorg.core; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Multimap; import com.google.schemaorg.SchemaOrgTypeImpl; import com.google.schemaorg.ValueType; import com.google.schemaorg.core.datatype.DateTime; import com.google.schemaorg.core.datatype.Text; import com.google.schemaorg.core.datatype.URL; import com.google.schemaorg.goog.GoogConstants; import com.google.schemaorg.goog.PopularityScoreSpecification; /** Implementation of {@link BefriendAction}. 
*/ public class BefriendActionImpl extends InteractActionImpl implements BefriendAction { private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet(); private static ImmutableSet<String> initializePropertySet() { ImmutableSet.Builder<String> builder = ImmutableSet.builder(); builder.add(CoreConstants.PROPERTY_ACTION_STATUS); builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE); builder.add(CoreConstants.PROPERTY_AGENT); builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME); builder.add(CoreConstants.PROPERTY_DESCRIPTION); builder.add(CoreConstants.PROPERTY_END_TIME); builder.add(CoreConstants.PROPERTY_ERROR); builder.add(CoreConstants.PROPERTY_IMAGE); builder.add(CoreConstants.PROPERTY_INSTRUMENT); builder.add(CoreConstants.PROPERTY_LOCATION); builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE); builder.add(CoreConstants.PROPERTY_NAME); builder.add(CoreConstants.PROPERTY_OBJECT); builder.add(CoreConstants.PROPERTY_PARTICIPANT); builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION); builder.add(CoreConstants.PROPERTY_RESULT); builder.add(CoreConstants.PROPERTY_SAME_AS); builder.add(CoreConstants.PROPERTY_START_TIME); builder.add(CoreConstants.PROPERTY_TARGET); builder.add(CoreConstants.PROPERTY_URL); builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION); builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE); return builder.build(); } static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<BefriendAction.Builder> implements BefriendAction.Builder { @Override public BefriendAction.Builder addActionStatus(ActionStatusType value) { return addProperty(CoreConstants.PROPERTY_ACTION_STATUS, value); } @Override public BefriendAction.Builder addActionStatus(String value) { return addProperty(CoreConstants.PROPERTY_ACTION_STATUS, Text.of(value)); } @Override public BefriendAction.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); } @Override public BefriendAction.Builder 
addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); } @Override public BefriendAction.Builder addAgent(Organization value) { return addProperty(CoreConstants.PROPERTY_AGENT, value); } @Override public BefriendAction.Builder addAgent(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_AGENT, value.build()); } @Override public BefriendAction.Builder addAgent(Person value) { return addProperty(CoreConstants.PROPERTY_AGENT, value); } @Override public BefriendAction.Builder addAgent(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_AGENT, value.build()); } @Override public BefriendAction.Builder addAgent(String value) { return addProperty(CoreConstants.PROPERTY_AGENT, Text.of(value)); } @Override public BefriendAction.Builder addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); } @Override public BefriendAction.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); } @Override public BefriendAction.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); } @Override public BefriendAction.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); } @Override public BefriendAction.Builder addEndTime(DateTime value) { return addProperty(CoreConstants.PROPERTY_END_TIME, value); } @Override public BefriendAction.Builder addEndTime(String value) { return addProperty(CoreConstants.PROPERTY_END_TIME, Text.of(value)); } @Override public BefriendAction.Builder addError(Thing value) { return addProperty(CoreConstants.PROPERTY_ERROR, value); } @Override public BefriendAction.Builder addError(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_ERROR, value.build()); } @Override public BefriendAction.Builder addError(String value) { return addProperty(CoreConstants.PROPERTY_ERROR, 
Text.of(value)); } @Override public BefriendAction.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public BefriendAction.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); } @Override public BefriendAction.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); } @Override public BefriendAction.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); } @Override public BefriendAction.Builder addInstrument(Thing value) { return addProperty(CoreConstants.PROPERTY_INSTRUMENT, value); } @Override public BefriendAction.Builder addInstrument(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_INSTRUMENT, value.build()); } @Override public BefriendAction.Builder addInstrument(String value) { return addProperty(CoreConstants.PROPERTY_INSTRUMENT, Text.of(value)); } @Override public BefriendAction.Builder addLocation(Place value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public BefriendAction.Builder addLocation(Place.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); } @Override public BefriendAction.Builder addLocation(PostalAddress value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public BefriendAction.Builder addLocation(PostalAddress.Builder value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value.build()); } @Override public BefriendAction.Builder addLocation(Text value) { return addProperty(CoreConstants.PROPERTY_LOCATION, value); } @Override public BefriendAction.Builder addLocation(String value) { return addProperty(CoreConstants.PROPERTY_LOCATION, Text.of(value)); } @Override public BefriendAction.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public BefriendAction.Builder 
addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); } @Override public BefriendAction.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); } @Override public BefriendAction.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); } @Override public BefriendAction.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); } @Override public BefriendAction.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); } @Override public BefriendAction.Builder addObject(Thing value) { return addProperty(CoreConstants.PROPERTY_OBJECT, value); } @Override public BefriendAction.Builder addObject(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_OBJECT, value.build()); } @Override public BefriendAction.Builder addObject(String value) { return addProperty(CoreConstants.PROPERTY_OBJECT, Text.of(value)); } @Override public BefriendAction.Builder addParticipant(Organization value) { return addProperty(CoreConstants.PROPERTY_PARTICIPANT, value); } @Override public BefriendAction.Builder addParticipant(Organization.Builder value) { return addProperty(CoreConstants.PROPERTY_PARTICIPANT, value.build()); } @Override public BefriendAction.Builder addParticipant(Person value) { return addProperty(CoreConstants.PROPERTY_PARTICIPANT, value); } @Override public BefriendAction.Builder addParticipant(Person.Builder value) { return addProperty(CoreConstants.PROPERTY_PARTICIPANT, value.build()); } @Override public BefriendAction.Builder addParticipant(String value) { return addProperty(CoreConstants.PROPERTY_PARTICIPANT, Text.of(value)); } @Override public BefriendAction.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); } @Override public 
BefriendAction.Builder addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); } @Override public BefriendAction.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); } @Override public BefriendAction.Builder addResult(Thing value) { return addProperty(CoreConstants.PROPERTY_RESULT, value); } @Override public BefriendAction.Builder addResult(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_RESULT, value.build()); } @Override public BefriendAction.Builder addResult(String value) { return addProperty(CoreConstants.PROPERTY_RESULT, Text.of(value)); } @Override public BefriendAction.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); } @Override public BefriendAction.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); } @Override public BefriendAction.Builder addStartTime(DateTime value) { return addProperty(CoreConstants.PROPERTY_START_TIME, value); } @Override public BefriendAction.Builder addStartTime(String value) { return addProperty(CoreConstants.PROPERTY_START_TIME, Text.of(value)); } @Override public BefriendAction.Builder addTarget(EntryPoint value) { return addProperty(CoreConstants.PROPERTY_TARGET, value); } @Override public BefriendAction.Builder addTarget(EntryPoint.Builder value) { return addProperty(CoreConstants.PROPERTY_TARGET, value.build()); } @Override public BefriendAction.Builder addTarget(String value) { return addProperty(CoreConstants.PROPERTY_TARGET, Text.of(value)); } @Override public BefriendAction.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); } @Override public BefriendAction.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); } @Override public BefriendAction.Builder addDetailedDescription(Article value) { return 
addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); } @Override public BefriendAction.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); } @Override public BefriendAction.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); } @Override public BefriendAction.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); } @Override public BefriendAction.Builder addPopularityScore(PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); } @Override public BefriendAction.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); } @Override public BefriendAction build() { return new BefriendActionImpl(properties, reverseMap); } } public BefriendActionImpl( Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) { super(properties, reverseMap); } @Override public String getFullTypeName() { return CoreConstants.TYPE_BEFRIEND_ACTION; } @Override public boolean includesProperty(String property) { return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property) || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property) || PROPERTY_SET.contains(property); } }
/*
 * Copyright 2017 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.vr.sdk.samples.videoplayer;

import android.content.Context;
import android.graphics.RectF;
import android.opengl.GLES20;
import android.opengl.Matrix;
import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.vr.ndk.base.BufferViewport;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;

/**
 * Handles positioning the video in the correct place in the scene and rendering a transparent hole
 * into the color buffer in the same place (the external video surface shows through the hole).
 * All methods in this class should be called on the application's GL thread, unless otherwise
 * noted.
 */
public class VideoScene {
  // NOTE(review): tag says "VideoScreen" while the class is VideoScene — likely a typo, but it is
  // a runtime string used in GL error logging, so it is preserved as-is here.
  private static final String TAG = "VideoScreen";

  // Full-quad UV rect; top/bottom flipped (top=1, bottom=0) relative to the RectF field order.
  private static final RectF videoUv = new RectF(0.f, 1.f, 1.f, 0.f);

  private final Settings settings;

  // Helper object for GL resources used by the scene. Shared by all draw calls of this instance.
  private final Resources resources = new Resources();

  // Scratch array for the transform from SPRITE_VERTICES_DATA space to eye space.
  private final float[] eyeFromQuad = new float[16];

  // Scratch array for the transform from SPRITE_VERTICES_DATA space to perspective eye space.
  // Reused by both draw() and drawVideoFrameRateBar().
  private final float[] perspectiveFromQuad = new float[16];

  // Transform from SPRITE_VERTICES_DATA space to world space. Set by setVideoTransform().
  private final float[] worldFromQuad = new float[16];

  // Transform from double-sided unit box with the video to the position of the frame rate bar.
  // Note that this is effectively just the offset and scale of the bar with respect to the video,
  // not its world position. Elements [0] (x scale) and [12] (x offset) are rewritten every frame
  // in drawVideoFrameRateBar().
  private final float[] frameRateBarFromQuad = {
      1.f, 0.f, 0.f, 0.f,
      0.f, 0.1f, 0.f, 0.f,
      0.f, 0.f, 1.f, 0.f,
      0.f, -1.2f, 0.f, 1.f
  };

  // Transform from SPRITE_VERTICES_DATA space to world space for the framerate bar under the video.
  private final float[] worldFromFrameRateBar = new float[16];

  // Timestamp (System.nanoTime) -> cumulative rendered-frame count, used by
  // updateVideoFpsFraction() to compute a frame rate over a sliding window.
  private final TreeMap<Long, Integer> frameCounts = new TreeMap<Long, Integer>();

  // volatile: written from other threads via the setters below, read on the GL thread.
  private volatile int videoSurfaceID = BufferViewport.EXTERNAL_SURFACE_ID_NONE;
  private volatile boolean isVideoPlaying = false;

  // Last computed fraction of the native frame rate, clamped to [0, 1]. GL-thread only.
  private float currentFpsFraction = 0.f;

  public VideoScene(Settings settings) {
    this.settings = settings;
  }

  /**
   * Sets whether video playback has started. If video playback has not started, the loading splash
   * screen is drawn.
   *
   * @param hasPlaybackStarted True if video is playing.
   */
  public void setHasVideoPlaybackStarted(boolean hasPlaybackStarted) {
    isVideoPlaying = hasPlaybackStarted;
  }

  /**
   * Set the ID of the external surface used to display the video.
   * Can be called from any thread. The ID will be updated on the next frame.
   */
  public void setVideoSurfaceId(int id) {
    videoSurfaceID = id;
  }

  /**
   * Specify where in the world space the video should appear.
   *
   * @param newWorldFromQuad Matrix in OpenGL format containing a transformation that positions
   *     a quad with vertices (1, 1, 0), (1, -1, 0), (-1, 1, 0), (-1, -1, 0) in the desired place
   *     in world space. The video will be shown at this quad's position.
   */
  public void setVideoTransform(float[] newWorldFromQuad) {
    // Copy rather than alias so later caller-side mutation cannot affect this scene.
    System.arraycopy(newWorldFromQuad, 0, this.worldFromQuad, 0, 16);
  }

  /**
   * Update a viewport so that it positions the video in the correct place in the scene seen by the
   * user and references the correct external surface. Can be safely called from a different thread
   * than the setter functions.
   *
   * @param viewport Viewport to update.
   * @param eyeFromWorld Matrix in OpenGL format containing the eye-from-world transformation,
   *     i.e., without the projective component.
   */
  public void updateViewport(BufferViewport viewport, float[] eyeFromWorld) {
    Matrix.multiplyMM(eyeFromQuad, 0, eyeFromWorld, 0, worldFromQuad, 0);
    viewport.setSourceUv(videoUv);
    viewport.setSourceBufferIndex(BufferViewport.BUFFER_INDEX_EXTERNAL_SURFACE);
    viewport.setExternalSurfaceId(videoSurfaceID);
    viewport.setTransform(eyeFromQuad);
  }

  /**
   * Draws the hole punch or a sprite that is in the same position as the video.
   *
   * <p>While the video is playing, a fully transparent solid color (alpha 0) is drawn to punch a
   * hole in the color buffer; otherwise the loading texture sprite is drawn instead.
   *
   * @param perspectiveFromWorld Transformation from world space to clip space.
   */
  public void draw(float[] perspectiveFromWorld) {
    Matrix.multiplyMM(perspectiveFromQuad, 0, perspectiveFromWorld, 0, worldFromQuad, 0);

    // Hole punch while playing; loading sprite otherwise.
    int program;
    if (isVideoPlaying) {
      program = resources.solidColorProgram;
    } else {
      program = resources.spriteProgram;
    }
    GLES20.glUseProgram(program);
    GLUtil.checkGlError(TAG, "glUseProgram");

    if (program == resources.spriteProgram) {
      // Bind the loading-splash texture to unit 0 and point the sampler at it.
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, resources.loadingTextureId);
      GLUtil.checkGlError(TAG, "glBindTexture");
      final int uImageTexture = GLES20.glGetUniformLocation(program, "uImageTexture");
      GLUtil.checkGlError(TAG, "glGetUniformLocation uImageTexture");
      if (uImageTexture == -1) {
        throw new RuntimeException("Could not get uniform location for uImageTexture");
      }
      GLES20.glUniform1i(uImageTexture, 0);
    } else {
      // Fully transparent black: this is the "hole punch" into the color buffer.
      final int uColor = GLES20.glGetUniformLocation(program, "uColor");
      GLES20.glUniform4f(uColor, 0.f, 0.f, 0.f, 0.f);
    }

    final int positionAttribute = GLES20.glGetAttribLocation(program, "aPosition");
    GLUtil.checkGlError(TAG, "glGetAttribLocation aPosition");
    GLES20.glVertexAttribPointer(
        positionAttribute, 3, GLES20.GL_FLOAT, false,
        Resources.VERTEX_DATA_STRIDE_BYTES, resources.vertexPositions);
    GLUtil.checkGlError(TAG, "glVertexAttribPointer position");
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLUtil.checkGlError(TAG, "glEnableVertexAttribArray position handle");

    // aTextureCoord only exists in the sprite program; the solid-color program may return -1,
    // which the >= 0 guard below tolerates.
    final int uvAttribute = GLES20.glGetAttribLocation(program, "aTextureCoord");
    GLUtil.checkGlError(TAG, "glGetAttribLocation aTextureCoord");
    if (uvAttribute >= 0) {
      GLES20.glVertexAttribPointer(
          uvAttribute, 2, GLES20.GL_FLOAT, false,
          Resources.VERTEX_DATA_STRIDE_BYTES, resources.vertexUVs);
      GLUtil.checkGlError(TAG, "glVertexAttribPointer uv handle");
      GLES20.glEnableVertexAttribArray(uvAttribute);
      GLUtil.checkGlError(TAG, "glEnableVertexAttribArray uv handle");
    }

    final int uMVPMatrix = GLES20.glGetUniformLocation(program, "uMVPMatrix");
    GLUtil.checkGlError(TAG, "glGetUniformLocation uMVPMatrix");
    if (uMVPMatrix == -1) {
      throw new RuntimeException("Could not get uniform location for uMVPMatrix");
    }
    GLES20.glUniformMatrix4fv(uMVPMatrix, 1, false, perspectiveFromQuad, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, Resources.NUM_VERTICES);

    // Restore attrib-array state so later draws are unaffected.
    GLES20.glDisableVertexAttribArray(positionAttribute);
    if (uvAttribute >= 0) {
      GLES20.glDisableVertexAttribArray(uvAttribute);
    }
    GLUtil.checkGlError(TAG, "glDrawArrays");

    if (settings.showFrameRateBar) {
      drawVideoFrameRateBar(perspectiveFromWorld);
    }
  }

  // Draws a horizontal bar under the video whose width and color reflect currentFpsFraction.
  private void drawVideoFrameRateBar(float[] perspectiveFromWorld) {
    // When the frame rate is 90% or less of native, we interpret this as a "bad" state.
    float colorFpsFraction = Math.max(0.f, (currentFpsFraction - 0.9f) / 0.1f);

    // Adjust the size of the bar and offset it to align its left end with the left edge of the
    // video quad.
    frameRateBarFromQuad[0] = currentFpsFraction;
    frameRateBarFromQuad[12] = -1.f + currentFpsFraction;
    Matrix.multiplyMM(worldFromFrameRateBar, 0, worldFromQuad, 0, frameRateBarFromQuad, 0);
    Matrix.multiplyMM(perspectiveFromQuad, 0, perspectiveFromWorld, 0, worldFromFrameRateBar, 0);

    GLES20.glUseProgram(resources.solidColorProgram);
    final int uColor = GLES20.glGetUniformLocation(resources.solidColorProgram, "uColor");
    // Fade between red and 80% gray when the video is DRM-protected. Fade between red and yellow
    // when the video is not protected.
    if (settings.useDrmVideoSample) {
      GLES20.glUniform4f(uColor,
          1.f - 0.2f * colorFpsFraction, 0.8f * colorFpsFraction, 0.8f * colorFpsFraction, 1.f);
    } else {
      GLES20.glUniform4f(uColor, 0.5f + 0.5f * colorFpsFraction, colorFpsFraction, 0.f, 1.f);
    }

    final int positionAttribute =
        GLES20.glGetAttribLocation(resources.solidColorProgram, "aPosition");
    GLES20.glVertexAttribPointer(
        positionAttribute, 3, GLES20.GL_FLOAT, false,
        Resources.VERTEX_DATA_STRIDE_BYTES, resources.vertexPositions);
    GLES20.glEnableVertexAttribArray(positionAttribute);
    final int uMVPMatrix = GLES20.glGetUniformLocation(resources.solidColorProgram, "uMVPMatrix");
    GLES20.glUniformMatrix4fv(uMVPMatrix, 1, false, perspectiveFromQuad, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, Resources.NUM_VERTICES);
    GLES20.glDisableVertexAttribArray(positionAttribute);
    GLUtil.checkGlError(TAG, "frame rate bar");
  }

  /**
   * Updates the average fraction of the native frame rate of the video achieved over the last N
   * seconds, based on the passed DecoderCounters object.
   *
   * @param averagingPeriodInSeconds Compute the average over this many seconds in the past.
   * @param nativeFrameRate Native frame rate of the video.
   * @param counters DecoderCounters object retrieved from the video decoder. May be null, in
   *     which case the fraction is reset to 0.
   */
  public void updateVideoFpsFraction(
      long averagingPeriodInSeconds, float nativeFrameRate, DecoderCounters counters) {
    if (!settings.showFrameRateBar || counters == null) {
      currentFpsFraction = 0.f;
      return;
    }
    // Compute the frame rate over the last N seconds.
    final long nowTime = System.nanoTime();
    final long cutoffTime = nowTime - TimeUnit.SECONDS.toNanos(averagingPeriodInSeconds);
    long pastTime = 0L;
    counters.ensureUpdated();
    final int currentBufferCount = counters.renderedOutputBufferCount;
    int pastBufferCount = 0;
    // Insert the current buffer count into the map for future computations.
    frameCounts.put(nowTime, currentBufferCount);
    // Loop over the map, pruning outdated entries and stopping at the first one that is within the
    // cutoff time. TreeMap iteration is in ascending key (time) order.
    for (Iterator<Map.Entry<Long, Integer>> iterator = frameCounts.entrySet().iterator();
        iterator.hasNext(); ) {
      Map.Entry<Long, Integer> count = iterator.next();
      if (count.getKey() < cutoffTime) {
        iterator.remove();
      } else {
        pastTime = count.getKey();
        pastBufferCount = count.getValue();
        break;
      }
    }
    // Compute the average fraction of the frame rate and clamp it to [0, 1].
    // Note: if no past entry survived pruning, pastTime stays 0 and elapsedSeconds is huge,
    // driving the fraction toward 0.
    float elapsedSeconds = ((float) (nowTime - pastTime)) / 1e9f;
    float rawFraction =
        ((float) (currentBufferCount - pastBufferCount)) / (elapsedSeconds * nativeFrameRate);
    currentFpsFraction = Math.min(1.0f, Math.max(0.f, rawFraction));
  }

  /**
   * Create and load OpenGL resources.
   *
   * This needs to be called every time the GL context is re-created. There is no release
   * counterpart for now, since GL resources are automatically cleaned up when the GL context
   * is destroyed.
   *
   * @param context Android activity context used to load the resources.
   */
  public void prepareGLResources(Context context) {
    resources.prepare(context);
  }

  /**
   * Manages all GL resources used by video scenes. Only one copy of these resources is needed
   * for all VideoScene instances.
   */
  private static final class Resources {
    // Shared vertex shader: transforms positions by uMVPMatrix and passes UVs through.
    static final String VERTEX_SHADER =
        "uniform mat4 uMVPMatrix;\n"
            + "attribute vec4 aPosition;\n"
            + "attribute vec4 aTextureCoord;\n"
            + "varying vec2 vTextureCoord;\n"
            + "void main() {\n"
            + " gl_Position = uMVPMatrix * aPosition;\n"
            + " vTextureCoord = aTextureCoord.st;\n"
            + "}\n";

    // Fragment shader for the loading sprite: samples the loading texture.
    static final String SPRITE_FRAGMENT_SHADER =
        "precision mediump float;\n"
            + "varying vec2 vTextureCoord;\n"
            + "uniform sampler2D uImageTexture;\n"
            + "void main() {\n"
            + " gl_FragColor = texture2D(uImageTexture, vTextureCoord);\n"
            + "}\n";

    // Fragment shader for the hole punch / frame-rate bar: flat uniform color.
    static final String SOLID_COLOR_FRAGMENT_SHADER =
        "precision mediump float;\n"
            + "uniform vec4 uColor;\n"
            + "varying vec2 vTextureCoord;\n"
            + "void main() {\n"
            + " gl_FragColor = uColor;\n"
            + "}\n";

    // Interleaved vertex data for a triangle-strip quad.
    static final float[] VERTEX_DATA = {
        // X, Y, Z, U, V
        -1.0f, 1.0f, 0.0f, 1, 1,
        1.0f, 1.0f, 0.0f, 0, 1,
        -1.0f, -1.0f, 0.0f, 1, 0,
        1.0f, -1.0f, 0.0f, 0, 0,
    };
    static final int NUM_VERTICES = 4;
    static final int FLOAT_SIZE_BYTES = 4;
    // 5 floats per vertex: XYZ position + UV.
    static final int VERTEX_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    static final int VERTEX_DATA_POS_OFFSET = 0;
    static final int VERTEX_DATA_UV_OFFSET = 3;

    int solidColorProgram = 0;
    int spriteProgram = 0;
    int loadingTextureId = 0;
    // Two FloatBuffer views over the same interleaved vertex data, positioned at the
    // position/UV offsets respectively; the stride skips the other attribute's floats.
    FloatBuffer vertexPositions;
    FloatBuffer vertexUVs;

    /* package */ void prepare(Context context) {
      // Prepare shader programs.
      solidColorProgram = GLUtil.createProgram(VERTEX_SHADER, SOLID_COLOR_FRAGMENT_SHADER);
      if (solidColorProgram == 0) {
        throw new RuntimeException("Could not create video program");
      }
      spriteProgram = GLUtil.createProgram(VERTEX_SHADER, SPRITE_FRAGMENT_SHADER);
      if (spriteProgram == 0) {
        throw new RuntimeException("Could not create sprite program");
      }
      // Prepare vertex data.
      ByteBuffer vertices =
          ByteBuffer.allocateDirect(VERTEX_DATA.length * FLOAT_SIZE_BYTES)
              .order(ByteOrder.nativeOrder());
      vertexPositions = vertices.asFloatBuffer();
      vertexPositions.put(VERTEX_DATA);
      vertexPositions.position(VERTEX_DATA_POS_OFFSET);
      vertexUVs = vertices.asFloatBuffer();
      vertexUVs.position(VERTEX_DATA_UV_OFFSET);
      // Load the texture to be shown instead of the video while the latter is initializing.
      int[] textureIds = new int[1];
      GLES20.glGenTextures(1, textureIds, 0);
      loadingTextureId = textureIds[0];
      GLUtil.createResourceTexture(context, loadingTextureId, R.raw.loading_bg);
    }
  }
}
/*
 * The Dragonite Project
 * -------------------------
 * See the LICENSE file in the root directory for license information.
 */

package com.vecsight.dragonite.sdk.socket;

import com.vecsight.dragonite.sdk.config.DragoniteSocketParameters;
import com.vecsight.dragonite.sdk.cryptor.PacketCryptor;
import com.vecsight.dragonite.sdk.exception.ConnectionNotAliveException;
import com.vecsight.dragonite.sdk.exception.IncorrectMessageException;
import com.vecsight.dragonite.sdk.exception.IncorrectSizeException;
import com.vecsight.dragonite.sdk.exception.SenderClosedException;
import com.vecsight.dragonite.sdk.misc.DragoniteGlobalConstants;
import com.vecsight.dragonite.sdk.msg.Message;
import com.vecsight.dragonite.sdk.msg.MessageParser;
import com.vecsight.dragonite.sdk.web.DevConsoleWebServer;

import java.io.IOException;
import java.net.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

/**
 * Client side of a Dragonite (reliable-over-UDP) socket. Owns a DatagramSocket bound to an
 * ephemeral local port and talks to a single remote address. Spawns three internal threads:
 * one that reads UDP packets into a queue, one that drains the queue and dispatches parsed
 * messages, and one that watches liveness (receive timeout / heartbeat sending).
 */
public class DragoniteClientSocket extends DragoniteSocket {

    // From parameters (snapshotted once in the constructor; immutable afterwards)
    private final int packetSize, maxPacketBufferSize;
    private final int windowMultiplier;
    private final int resendMinDelayMS;
    private final int heartbeatIntervalSec, receiveTimeoutSec;
    private final boolean autoSplit;
    private final boolean enableWebPanel;
    private final PacketCryptor packetCryptor;
    // Extra bytes the cryptor may add per packet; 0 when encryption is disabled.
    private final int cryptorOverhead;
    // end

    private final Thread receiveThread; //THREAD
    private final Thread handleThread; //THREAD
    private final Thread aliveDetectThread; //THREAD

    // Loop flags for the three threads above; volatile so destroy() can stop them.
    private volatile boolean doReceive = true, doHandle = true, doAliveDetect = true;

    private final DatagramSocket datagramSocket;
    private final SocketAddress remoteAddress;

    // Hand-off between the receive thread (producer) and the handle thread (consumer).
    // Bounded when maxPacketBufferSize > 0, which applies backpressure on the receive thread.
    private final BlockingQueue<DatagramPacket> packetBuffer;

    private final BucketPacketSender bucketPacketSender;
    private final ReceiveHandler receiver;
    private final ResendHandler resender; //THREAD
    private final SendHandler sender;
    private final ACKSender ackSender; //THREAD

    private final ConnectionState state = new ConnectionState();

    // False once destroy() has run; guarded by closeLock for the close/destroy transition.
    private volatile boolean alive = true;

    private volatile long lastReceiveTime, lastSendTime;

    private final Object closeLock = new Object();

    private volatile String description;

    // Optional dev console; null when the web panel is disabled or failed to start.
    private final DevConsoleWebServer devConsoleWebServer;
    private final InetSocketAddress devConsoleBindAddress;

    /**
     * Creates the socket, wires up the send/receive/resend pipeline and starts the three
     * internal threads. The socket is usable as soon as the constructor returns.
     *
     * @param remoteAddress remote peer to exchange packets with
     * @param sendSpeed     initial send speed for the rate-limiting packet sender
     * @param parameters    configuration (packet size, timeouts, cryptor, web panel, ...)
     * @throws SocketException if the underlying DatagramSocket cannot be created/configured
     */
    public DragoniteClientSocket(final SocketAddress remoteAddress, final long sendSpeed, final DragoniteSocketParameters parameters) throws SocketException {
        this.remoteAddress = remoteAddress;
        datagramSocket = new DatagramSocket();
        //set from parameters
        packetSize = parameters.getPacketSize();
        maxPacketBufferSize = parameters.getMaxPacketBufferSize();
        windowMultiplier = parameters.getWindowMultiplier();
        resendMinDelayMS = parameters.getResendMinDelayMS();
        heartbeatIntervalSec = parameters.getHeartbeatIntervalSec();
        receiveTimeoutSec = parameters.getReceiveTimeoutSec();
        autoSplit = parameters.isAutoSplit();
        enableWebPanel = parameters.isEnableWebPanel();
        devConsoleBindAddress = parameters.getWebPanelBindAddress();
        packetCryptor = parameters.getPacketCryptor();
        cryptorOverhead = packetCryptor != null ? packetCryptor.getMaxAdditionalBytesLength() : 0;
        datagramSocket.setTrafficClass(parameters.getTrafficClass());
        //end

        // 0 means "unbounded"; otherwise cap the queue so a slow handler back-pressures receive.
        if (maxPacketBufferSize == 0) {
            packetBuffer = new LinkedBlockingQueue<>();
        } else {
            packetBuffer = new LinkedBlockingQueue<>(maxPacketBufferSize);
        }

        updateLastReceiveTime();

        // Pipeline wiring. Order matters: each component is handed the ones constructed before it.
        bucketPacketSender = new BucketPacketSender(bytes -> sendPacket(bytes, remoteAddress), sendSpeed);
        ackSender = new ACKSender(this, bucketPacketSender, DragoniteGlobalConstants.ACK_INTERVAL_MS, packetSize);
        resender = new ResendHandler(this, bucketPacketSender, state, resendMinDelayMS, DragoniteGlobalConstants.ACK_INTERVAL_MS);
        receiver = new ReceiveHandler(this, ackSender, state, windowMultiplier, resender, packetSize);
        sender = new SendHandler(this, bucketPacketSender, receiver, state, resender, packetSize);

        description = "DCSocket";

        // Reads raw UDP packets and queues them. A fresh buffer is allocated per packet because
        // the queued DatagramPacket keeps a reference to it. IOExceptions (including the socket
        // being closed by destroy()) are deliberately ignored; the doReceive flag ends the loop.
        receiveThread = new Thread(() -> {
            try {
                while (doReceive) {
                    final byte[] b = new byte[packetSize + cryptorOverhead];
                    final DatagramPacket packet = new DatagramPacket(b, b.length);
                    try {
                        datagramSocket.receive(packet);
                        packetBuffer.put(packet);
                    } catch (final IOException ignored) {
                    }
                }
            } catch (final InterruptedException ignored) {
                //okay
            }
        }, "DC-Receive");
        receiveThread.start();

        // Drains the queue and dispatches each packet; blocked take() is ended by interrupt()
        // from destroy().
        handleThread = new Thread(() -> {
            try {
                while (doHandle) {
                    final DatagramPacket packet = packetBuffer.take();
                    handlePacket(packet);
                }
            } catch (final InterruptedException ignored) {
                //okay
            }
        }, "DC-PacketHandle");
        handleThread.start();

        // Once a second: destroy the connection when nothing was received within
        // receiveTimeoutSec, otherwise send a heartbeat when nothing was sent within
        // heartbeatIntervalSec.
        aliveDetectThread = new Thread(() -> {
            try {
                while (doAliveDetect) {
                    final long current = System.currentTimeMillis();
                    if (alive) {
                        if (current - getLastReceiveTime() > receiveTimeoutSec * 1000) {
                            destroy();
                        } else if (current - getLastSendTime() > heartbeatIntervalSec * 1000) {
                            try {
                                //TODO Fix blocking
                                sendHeartbeat();
                            } catch (IOException | SenderClosedException ignored) {
                            }
                        }
                    } else {
                        doAliveDetect = false;
                    }
                    Thread.sleep(1000);
                }
            } catch (final InterruptedException ignored) {
                //okay
            }
        }, "DC-AliveDetect");
        aliveDetectThread.start();

        // Best-effort: if the dev console fails to bind, the socket still works without it.
        DevConsoleWebServer tmpServer = null;
        if (enableWebPanel) {
            try {
                tmpServer = new DevConsoleWebServer(devConsoleBindAddress, () -> {
                    final ArrayList<DragoniteSocketStatistics> list = new ArrayList<>(1);
                    list.add(getStatistics());
                    return list;
                });
            } catch (final IOException ignored) {
            }
        }
        devConsoleWebServer = tmpServer;
    }

    // Decrypts (if a cryptor is configured) and parses one raw packet, then forwards the parsed
    // message to the receiver. Undecryptable or unparsable packets are silently dropped.
    private void handlePacket(final DatagramPacket packet) {
        Message message = null;
        try {
            // Copy to the exact received length; the backing array is padded to max packet size.
            final byte[] packetData = Arrays.copyOf(packet.getData(), packet.getLength());
            final byte[] data = packetCryptor != null ? packetCryptor.decrypt(packetData) : packetData;
            if (data != null) message = MessageParser.parseMessage(data);
        } catch (final IncorrectMessageException ignored) {
        }
        if (message != null) {
            receiver.onHandleMessage(message, packet.getLength());
        }
    }

    private void sendHeartbeat() throws InterruptedException, IOException, SenderClosedException {
        sender.sendHeartbeatMessage();
    }

    //SEND ALL PACKETS THROUGH THIS!!
    // Encrypts (if configured) and writes one packet to the UDP socket, recording the send time.
    // A null result from the cryptor silently drops the packet.
    private void sendPacket(final byte[] bytes, final SocketAddress socketAddress) throws IOException {
        final byte[] data = packetCryptor != null ? packetCryptor.encrypt(bytes) : bytes;
        if (data != null) {
            final DatagramPacket packet = new DatagramPacket(data, data.length);
            packet.setSocketAddress(socketAddress);
            datagramSocket.send(packet);
            updateLastSendTime();
        }
    }

    @Override
    public byte[] read() throws InterruptedException, ConnectionNotAliveException {
        return receiver.read();
    }

    @Override
    public void send(final byte[] bytes) throws InterruptedException, IncorrectSizeException, IOException, SenderClosedException {
        // autoSplit lets payloads larger than one packet be fragmented transparently.
        if (autoSplit) {
            sender.sendDataMessage_autoSplit(bytes);
        } else {
            sender.sendDataMessage_noSplit(bytes);
        }
    }

    // Snapshot of current counters from all pipeline components.
    @Override
    public DragoniteSocketStatistics getStatistics() {
        return new DragoniteSocketStatistics(remoteAddress, description,
                sender.getSendLength(), bucketPacketSender.getSendRawLength(),
                receiver.getReadLength(), receiver.getReceivedRawLength(),
                state.getEstimatedRTT(), state.getDevRTT(),
                resender.getTotalMessageCount(), resender.getResendCount(),
                receiver.getReceivedPktCount(), receiver.getDupPktCount());
    }

    @Override
    public String getDescription() {
        return description;
    }

    @Override
    public void setDescription(final String description) {
        this.description = description;
    }

    @Override
    public boolean isAlive() {
        return alive;
    }

    @Override
    protected void closeSender() {
        sender.stopSend();
    }

    /**
     * Sends a close message to the peer and then tears the connection down.
     * No-op if the socket is already dead.
     */
    @Override
    public void closeGracefully() throws InterruptedException, IOException, SenderClosedException {
        synchronized (closeLock) {
            if (alive) {
                sender.sendCloseMessage((short) 0, true, true);
                destroy();
            }
        }
    }

    /**
     * Immediately tears the connection down: stops all pipeline components, interrupts the three
     * internal threads, clears the packet queue and closes the UDP socket. Idempotent; guarded by
     * closeLock so concurrent close paths run the shutdown sequence exactly once.
     */
    @Override
    public void destroy() {
        synchronized (closeLock) {
            if (alive) {
                alive = false;
                doReceive = false;
                doHandle = false;
                doAliveDetect = false;
                sender.stopSend();
                receiver.close();
                resender.close();
                ackSender.close();
                receiveThread.interrupt();
                handleThread.interrupt();
                aliveDetectThread.interrupt();
                packetBuffer.clear();
                datagramSocket.close();
                if (devConsoleWebServer != null) {
                    devConsoleWebServer.stop();
                }
            }
        }
    }

    @Override
    protected void updateLastReceiveTime() {
        lastReceiveTime = System.currentTimeMillis();
    }

    @Override
    public long getLastReceiveTime() {
        return lastReceiveTime;
    }

    @Override
    protected void updateLastSendTime() {
        lastSendTime = System.currentTimeMillis();
    }

    @Override
    public long getLastSendTime() {
        return lastSendTime;
    }

    @Override
    public SocketAddress getRemoteSocketAddress() {
        return remoteAddress;
    }

    @Override
    public void setSendSpeed(final long sendSpeed) {
        bucketPacketSender.setSpeed(sendSpeed);
    }

    @Override
    public long getSendSpeed() {
        return bucketPacketSender.getSpeed();
    }

    public int getPacketSize() {
        return packetSize;
    }

    public int getMaxPacketBufferSize() {
        return maxPacketBufferSize;
    }

    public int getWindowMultiplier() {
        return windowMultiplier;
    }

    public int getResendMinDelayMS() {
        return resendMinDelayMS;
    }

    public int getHeartbeatIntervalSec() {
        return heartbeatIntervalSec;
    }

    public int getReceiveTimeoutSec() {
        return receiveTimeoutSec;
    }

    public boolean isAutoSplit() {
        return autoSplit;
    }

    public boolean isEnableWebPanel() {
        return enableWebPanel;
    }
}
package com.app.comic.ui.Presenter; import com.app.comic.ui.Model.Receive.DestinationReceive; import com.app.comic.ui.Model.Receive.DriverInfoReceive; import com.app.comic.ui.Model.Receive.ListRidesReceive; import com.app.comic.ui.Model.Receive.PassengerInfoReceive; import com.app.comic.ui.Model.Receive.SelectReceive; import com.app.comic.ui.Model.Receive.SignDriverReceive; import com.app.comic.ui.Model.Receive.SignPassengerReceive; import com.app.comic.ui.Model.Receive.LoginReceive; import com.app.comic.ui.Model.Receive.UpdateDriverReceive; import com.app.comic.ui.Model.Receive.UpdatePassengerReceive; import com.app.comic.ui.Model.Request.DestinationRequest; import com.app.comic.ui.Model.Request.DriverInfoRequest; import com.app.comic.ui.Model.Request.ListRidesRequest; import com.app.comic.ui.Model.Request.LoginRequest; import com.app.comic.ui.Model.Request.PassengerInfoRequest; import com.app.comic.ui.Model.Request.SelectRequest; import com.app.comic.ui.Model.Request.SignDriverRequest; import com.app.comic.ui.Model.Request.SignPassengerRequest; import com.app.comic.ui.Model.Request.UpdateDriverRequest; import com.app.comic.ui.Model.Request.UpdatePassengerRequest; import com.app.comic.utils.SharedPrefManager; import com.squareup.otto.Bus; import com.squareup.otto.Subscribe; public class HomePresenter { private SharedPrefManager pref; public interface LoginView { void onLoginReceive(LoginReceive event); } public interface SignPassengerView { void onSignPassengerReceive(SignPassengerReceive event); } public interface UpdatePassengerView { void onUpdatePassengerReceive(UpdatePassengerReceive event); } public interface DestinationView { void onDestinationReceive(DestinationReceive event); } public interface HomeView { void onPassengerInfoReceive(PassengerInfoReceive event); void onDriverInfoReceive(DriverInfoReceive event); } public interface UpdateDriverView { void onUpdateDriverReceive(UpdateDriverReceive event); } public interface SignDriverView { void 
onSignDriverReceive(SignDriverReceive event); } public interface ListRidesView { void onListRidesReceive(ListRidesReceive event); void onSelectReceive(SelectReceive event); } public interface SelectionView { void onSelectView(SelectReceive event); } public interface SplashScreen { void onConnectionFailed(); } private UpdatePassengerView updatePassengerView; private HomeView homeView; private SplashScreen view2; private LoginView loginView; private DestinationView destinationView; private SignPassengerView signPassengerView; private SignDriverView signDriverView; private SelectionView selectionView; private ListRidesView listRidesView; private UpdateDriverView updateDriverView; private final Bus bus; public HomePresenter(UpdatePassengerView view, Bus bus) { this.updatePassengerView = view; this.bus = bus; } public HomePresenter(LoginView view, Bus bus) { this.loginView = view; this.bus = bus; } public HomePresenter(ListRidesView view, Bus bus) { this.listRidesView = view; this.bus = bus; } public HomePresenter(UpdateDriverView view, Bus bus) { this.updateDriverView = view; this.bus = bus; } public HomePresenter(HomeView view, Bus bus) { this.homeView = view; this.bus = bus; } public HomePresenter(SelectionView view, Bus bus) { this.selectionView = view; this.bus = bus; } public HomePresenter(SignPassengerView view, Bus bus) { this.signPassengerView = view; this.bus = bus; } public HomePresenter(SignDriverView view, Bus bus) { this.signDriverView = view; this.bus = bus; } public HomePresenter(DestinationView view, Bus bus) { this.destinationView = view; this.bus = bus; } public void onUpdatePassengerRequest(UpdatePassengerRequest data) { bus.post(new UpdatePassengerRequest(data)); } public void onPassengerInfoRequest(PassengerInfoRequest data) { bus.post(new PassengerInfoRequest(data)); } public void onDriverInfoRequest(DriverInfoRequest data) { bus.post(new DriverInfoRequest(data)); } public void onListRequest(ListRidesRequest data) { bus.post(new 
ListRidesRequest(data)); } public void onUpdateDriverRequest(UpdateDriverRequest data) { bus.post(new UpdateDriverRequest(data)); } public void onDestinationRequest(DestinationRequest data) { bus.post(new DestinationRequest(data)); } public void onSelectRequest(SelectRequest data) { bus.post(new SelectRequest(data)); } public void onRegisterRequest(SignPassengerRequest data) { bus.post(new SignPassengerRequest(data)); } public void onLoginRequest(LoginRequest data) { bus.post(new LoginRequest(data)); } public void onSignDriverRequest(SignDriverRequest data) { bus.post(new SignDriverRequest(data)); } @Subscribe public void onUpdatePassengerReceive(UpdatePassengerReceive event) { updatePassengerView.onUpdatePassengerReceive(event); } @Subscribe public void onPassengerInfoReceive(PassengerInfoReceive event) { homeView.onPassengerInfoReceive(event); } @Subscribe public void onDriverInfoReceive(DriverInfoReceive event) { homeView.onDriverInfoReceive(event); } @Subscribe public void onUpdateDriverReceive(UpdateDriverReceive event) { updateDriverView.onUpdateDriverReceive(event); } @Subscribe public void onLoginReceive(LoginReceive event) { loginView.onLoginReceive(event); } @Subscribe public void onSelectReceive(SelectReceive event) { if (selectionView != null) { selectionView.onSelectView(event); } if (listRidesView != null) { listRidesView.onSelectReceive(event); } } @Subscribe public void onListRidesReceive(ListRidesReceive event) { listRidesView.onListRidesReceive(event); } @Subscribe public void onDestinationReceive(DestinationReceive event) { destinationView.onDestinationReceive(event); } @Subscribe public void onSignDriverReceive(SignDriverReceive event) { signDriverView.onSignDriverReceive(event); } @Subscribe public void onSignPassengerReceive(SignPassengerReceive event) { signPassengerView.onSignPassengerReceive(event); } public void onResume() { bus.register(this); } public void onPause() { bus.unregister(this); } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.cassandra;

import com.datastax.driver.core.AbstractTableMetadata;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ColumnMetadata;
import com.datastax.driver.core.DataType;
import com.datastax.driver.core.Host;
import com.datastax.driver.core.IndexMetadata;
import com.datastax.driver.core.KeyspaceMetadata;
import com.datastax.driver.core.MaterializedViewMetadata;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.TokenRange;
import com.datastax.driver.core.VersionNumber;
import com.datastax.driver.core.exceptions.NoHostAvailableException;
import com.datastax.driver.core.policies.ReconnectionPolicy;
import com.datastax.driver.core.policies.ReconnectionPolicy.ReconnectionSchedule;
import com.datastax.driver.core.querybuilder.Clause;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.units.Duration;
import io.prestosql.plugin.cassandra.util.CassandraCqlUtils;
import io.prestosql.spi.PrestoException;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.connector.SchemaNotFoundException;
import io.prestosql.spi.connector.SchemaTableName;
import io.prestosql.spi.connector.TableNotFoundException;
import io.prestosql.spi.predicate.NullableValue;
import io.prestosql.spi.predicate.TupleDomain;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;

import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
import static com.datastax.driver.core.querybuilder.QueryBuilder.select;
import static com.datastax.driver.core.querybuilder.Select.Where;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.in;
import static com.google.common.base.Predicates.not;
import static com.google.common.base.Suppliers.memoize;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.filter;
import static com.google.common.collect.Iterables.transform;
import static io.prestosql.plugin.cassandra.CassandraErrorCode.CASSANDRA_VERSION_ERROR;
import static io.prestosql.plugin.cassandra.util.CassandraCqlUtils.validSchemaName;
import static io.prestosql.spi.StandardErrorCode.NOT_SUPPORTED;
import static java.lang.String.format;
import static java.util.Comparator.comparing;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;

/**
 * {@link CassandraSession} backed by the DataStax native-protocol driver.
 *
 * <p>All driver calls funnel through {@link #executeWithSession}, which retries
 * {@link NoHostAvailableException} (using the cluster's reconnection-policy
 * schedule) until {@code noHostAvailableRetryTimeout} elapses. Schema lookups
 * are case-insensitive on the Presto side and resolved against the driver's
 * case-sensitive metadata, failing loudly on ambiguity.
 */
public class NativeCassandraSession
        implements CassandraSession
{
    private static final Logger log = Logger.get(NativeCassandraSession.class);

    // Prefix marking a table comment that carries Presto's serialized column
    // ordering/visibility metadata (a JSON list of ExtraColumnMetadata).
    private static final String PRESTO_COMMENT_METADATA = "Presto Metadata:";
    private static final String SYSTEM = "system";
    private static final String SIZE_ESTIMATES = "size_estimates";
    // Minimum cluster version for fetching partitions with a single IN query;
    // older clusters fall back to one query per value combination.
    private static final VersionNumber PARTITION_FETCH_WITH_IN_PREDICATE_VERSION = VersionNumber.parse("2.2");

    private final String connectorId;
    private final JsonCodec<List<ExtraColumnMetadata>> extraColumnMetadataCodec;
    private final Cluster cluster;
    private final Supplier<Session> session;
    private final Duration noHostAvailableRetryTimeout;

    public NativeCassandraSession(String connectorId, JsonCodec<List<ExtraColumnMetadata>> extraColumnMetadataCodec, Cluster cluster, Duration noHostAvailableRetryTimeout)
    {
        this.connectorId = requireNonNull(connectorId, "connectorId is null");
        this.extraColumnMetadataCodec = requireNonNull(extraColumnMetadataCodec, "extraColumnMetadataCodec is null");
        this.cluster = requireNonNull(cluster, "cluster is null");
        this.noHostAvailableRetryTimeout = requireNonNull(noHostAvailableRetryTimeout, "noHostAvailableRetryTimeout is null");
        // Defer connecting until first use; memoize so we connect only once.
        this.session = memoize(cluster::connect);
    }

    /**
     * Queries {@code system.local} for the cluster's release version.
     *
     * @throws PrestoException with CASSANDRA_VERSION_ERROR if no row is returned
     */
    @Override
    public VersionNumber getCassandraVersion()
    {
        ResultSet result = executeWithSession(session -> session.execute("select release_version from system.local"));
        Row versionRow = result.one();
        if (versionRow == null) {
            throw new PrestoException(CASSANDRA_VERSION_ERROR, "The cluster version is not available. " +
                    "Please make sure that the Cassandra cluster is up and running, " +
                    "and that the contact points are specified correctly.");
        }
        return VersionNumber.parse(versionRow.getString("release_version"));
    }

    @Override
    public String getPartitioner()
    {
        return executeWithSession(session -> session.getCluster().getMetadata().getPartitioner());
    }

    @Override
    public Set<TokenRange> getTokenRanges()
    {
        return executeWithSession(session -> session.getCluster().getMetadata().getTokenRanges());
    }

    /** Hosts replicating the given token range of the (case-sensitive) keyspace. */
    @Override
    public Set<Host> getReplicas(String caseSensitiveSchemaName, TokenRange tokenRange)
    {
        requireNonNull(caseSensitiveSchemaName, "keyspace is null");
        requireNonNull(tokenRange, "tokenRange is null");
        return executeWithSession(session ->
                session.getCluster().getMetadata().getReplicas(validSchemaName(caseSensitiveSchemaName), tokenRange));
    }

    /** Hosts replicating the given partition key of the (case-sensitive) keyspace. */
    @Override
    public Set<Host> getReplicas(String caseSensitiveSchemaName, ByteBuffer partitionKey)
    {
        requireNonNull(caseSensitiveSchemaName, "keyspace is null");
        requireNonNull(partitionKey, "partitionKey is null");
        return executeWithSession(session ->
                session.getCluster().getMetadata().getReplicas(validSchemaName(caseSensitiveSchemaName), partitionKey));
    }

    /** Resolves a case-insensitive schema name to the keyspace's actual name. */
    @Override
    public String getCaseSensitiveSchemaName(String caseInsensitiveSchemaName)
    {
        return getKeyspaceByCaseInsensitiveName(caseInsensitiveSchemaName).getName();
    }

    @Override
    public List<String> getCaseSensitiveSchemaNames()
    {
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        List<KeyspaceMetadata> keyspaces = executeWithSession(session -> session.getCluster().getMetadata().getKeyspaces());
        for (KeyspaceMetadata meta : keyspaces) {
            builder.add(meta.getName());
        }
        return builder.build();
    }

    /**
     * Names of all tables AND materialized views in the keyspace (both are
     * exposed as tables to Presto).
     */
    @Override
    public List<String> getCaseSensitiveTableNames(String caseInsensitiveSchemaName)
            throws SchemaNotFoundException
    {
        KeyspaceMetadata keyspace = getKeyspaceByCaseInsensitiveName(caseInsensitiveSchemaName);
        ImmutableList.Builder<String> builder = ImmutableList.builder();
        for (TableMetadata table : keyspace.getTables()) {
            builder.add(table.getName());
        }
        for (MaterializedViewMetadata materializedView : keyspace.getMaterializedViews()) {
            builder.add(materializedView.getName());
        }
        return builder.build();
    }

    /**
     * Builds the Presto table model: partition-key columns first, then
     * clustering columns, then regular columns; final order is by ordinal
     * position, which may come from Presto metadata embedded in the table
     * comment (see {@link #PRESTO_COMMENT_METADATA}).
     */
    @Override
    public CassandraTable getTable(SchemaTableName schemaTableName)
            throws TableNotFoundException
    {
        KeyspaceMetadata keyspace = getKeyspaceByCaseInsensitiveName(schemaTableName.getSchemaName());
        AbstractTableMetadata tableMeta = getTableMetadata(keyspace, schemaTableName.getTableName());

        List<String> columnNames = new ArrayList<>();
        List<ColumnMetadata> columns = tableMeta.getColumns();
        checkColumnNames(columns);
        for (ColumnMetadata columnMetadata : columns) {
            columnNames.add(columnMetadata.getName());
        }

        // check if there is a comment to establish column ordering
        String comment = tableMeta.getOptions().getComment();
        Set<String> hiddenColumns = ImmutableSet.of();
        if (comment != null && comment.startsWith(PRESTO_COMMENT_METADATA)) {
            String columnOrderingString = comment.substring(PRESTO_COMMENT_METADATA.length());

            // column ordering
            List<ExtraColumnMetadata> extras = extraColumnMetadataCodec.fromJson(columnOrderingString);
            List<String> explicitColumnOrder = new ArrayList<>(ImmutableList.copyOf(transform(extras, ExtraColumnMetadata::getName)));
            hiddenColumns = ImmutableSet.copyOf(transform(filter(extras, ExtraColumnMetadata::isHidden), ExtraColumnMetadata::getName));

            // add columns not in the comment to the ordering
            Iterables.addAll(explicitColumnOrder, filter(columnNames, not(in(explicitColumnOrder))));

            // sort the actual columns names using the explicit column order (this allows for missing columns)
            columnNames = Ordering.explicit(explicitColumnOrder).sortedCopy(columnNames);
        }

        ImmutableList.Builder<CassandraColumnHandle> columnHandles = ImmutableList.builder();

        // add primary keys first
        Set<String> primaryKeySet = new HashSet<>();
        for (ColumnMetadata columnMeta : tableMeta.getPartitionKey()) {
            primaryKeySet.add(columnMeta.getName());
            boolean hidden = hiddenColumns.contains(columnMeta.getName());
            CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, true, false, columnNames.indexOf(columnMeta.getName()), hidden);
            columnHandles.add(columnHandle);
        }

        // add clustering columns
        for (ColumnMetadata columnMeta : tableMeta.getClusteringColumns()) {
            primaryKeySet.add(columnMeta.getName());
            boolean hidden = hiddenColumns.contains(columnMeta.getName());
            CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, false, true, columnNames.indexOf(columnMeta.getName()), hidden);
            columnHandles.add(columnHandle);
        }

        // add other columns
        for (ColumnMetadata columnMeta : columns) {
            if (!primaryKeySet.contains(columnMeta.getName())) {
                boolean hidden = hiddenColumns.contains(columnMeta.getName());
                CassandraColumnHandle columnHandle = buildColumnHandle(tableMeta, columnMeta, false, false, columnNames.indexOf(columnMeta.getName()), hidden);
                columnHandles.add(columnHandle);
            }
        }

        List<CassandraColumnHandle> sortedColumnHandles = columnHandles.build().stream()
                .sorted(comparing(CassandraColumnHandle::getOrdinalPosition))
                .collect(toList());

        CassandraTableHandle tableHandle = new CassandraTableHandle(connectorId, tableMeta.getKeyspace().getName(), tableMeta.getName());
        return new CassandraTable(tableHandle, sortedColumnHandles);
    }

    /**
     * Case-insensitive keyspace lookup.
     *
     * @throws SchemaNotFoundException if no keyspace matches
     * @throws PrestoException (NOT_SUPPORTED) if more than one keyspace matches
     */
    private KeyspaceMetadata getKeyspaceByCaseInsensitiveName(String caseInsensitiveSchemaName)
            throws SchemaNotFoundException
    {
        List<KeyspaceMetadata> keyspaces = executeWithSession(session -> session.getCluster().getMetadata().getKeyspaces());
        KeyspaceMetadata result = null;
        // Ensure that the error message is deterministic
        List<KeyspaceMetadata> sortedKeyspaces = Ordering.from(comparing(KeyspaceMetadata::getName)).immutableSortedCopy(keyspaces);
        for (KeyspaceMetadata keyspace : sortedKeyspaces) {
            if (keyspace.getName().equalsIgnoreCase(caseInsensitiveSchemaName)) {
                if (result != null) {
                    throw new PrestoException(
                            NOT_SUPPORTED,
                            format("More than one keyspace has been found for the case insensitive schema name: %s -> (%s, %s)",
                                    caseInsensitiveSchemaName, result.getName(), keyspace.getName()));
                }
                result = keyspace;
            }
        }
        if (result == null) {
            throw new SchemaNotFoundException(caseInsensitiveSchemaName);
        }
        return result;
    }

    /**
     * Case-insensitive lookup across both tables and materialized views.
     *
     * @throws TableNotFoundException if nothing matches
     * @throws PrestoException (NOT_SUPPORTED) if more than one object matches
     */
    private static AbstractTableMetadata getTableMetadata(KeyspaceMetadata keyspace, String caseInsensitiveTableName)
    {
        List<AbstractTableMetadata> tables = Stream.concat(
                keyspace.getTables().stream(),
                keyspace.getMaterializedViews().stream())
                .filter(table -> table.getName().equalsIgnoreCase(caseInsensitiveTableName))
                .collect(toImmutableList());
        if (tables.size() == 0) {
            throw new TableNotFoundException(new SchemaTableName(keyspace.getName(), caseInsensitiveTableName));
        }
        else if (tables.size() == 1) {
            return tables.get(0);
        }
        String tableNames = tables.stream()
                .map(AbstractTableMetadata::getName)
                .sorted()
                .collect(joining(", "));
        throw new PrestoException(
                NOT_SUPPORTED,
                format("More than one table has been found for the case insensitive table name: %s -> (%s)",
                        caseInsensitiveTableName, tableNames));
    }

    /** Whether the named object is a materialized view (vs. a regular table). */
    public boolean isMaterializedView(SchemaTableName schemaTableName)
    {
        KeyspaceMetadata keyspace = getKeyspaceByCaseInsensitiveName(schemaTableName.getSchemaName());
        return keyspace.getMaterializedView(schemaTableName.getTableName()) != null;
    }

    /**
     * Rejects tables where two columns collide under case-insensitive naming,
     * since Presto column names are case-insensitive.
     */
    private static void checkColumnNames(List<ColumnMetadata> columns)
    {
        Map<String, ColumnMetadata> lowercaseNameToColumnMap = new HashMap<>();
        for (ColumnMetadata column : columns) {
            String lowercaseName = column.getName().toLowerCase(ENGLISH);
            if (lowercaseNameToColumnMap.containsKey(lowercaseName)) {
                throw new PrestoException(
                        NOT_SUPPORTED,
                        format("More than one column has been found for the case insensitive column name: %s -> (%s, %s)",
                                lowercaseName, lowercaseNameToColumnMap.get(lowercaseName).getName(), column.getName()));
            }
            lowercaseNameToColumnMap.put(lowercaseName, column);
        }
    }

    /**
     * Maps a driver column to a {@link CassandraColumnHandle}, resolving the
     * Cassandra type (plus 1 or 2 type arguments for collection types) and
     * whether a secondary index targets the column (views have no indexes).
     */
    private CassandraColumnHandle buildColumnHandle(AbstractTableMetadata tableMetadata, ColumnMetadata columnMeta, boolean partitionKey, boolean clusteringKey, int ordinalPosition, boolean hidden)
    {
        CassandraType cassandraType = CassandraType.getCassandraType(columnMeta.getType().getName());
        List<CassandraType> typeArguments = null;
        if (cassandraType != null && cassandraType.getTypeArgumentSize() > 0) {
            List<DataType> typeArgs = columnMeta.getType().getTypeArguments();
            switch (cassandraType.getTypeArgumentSize()) {
                case 1:
                    typeArguments = ImmutableList.of(CassandraType.getCassandraType(typeArgs.get(0).getName()));
                    break;
                case 2:
                    typeArguments = ImmutableList.of(CassandraType.getCassandraType(typeArgs.get(0).getName()), CassandraType.getCassandraType(typeArgs.get(1).getName()));
                    break;
                default:
                    throw new IllegalArgumentException("Invalid type arguments: " + typeArgs);
            }
        }
        boolean indexed = false;
        SchemaTableName schemaTableName = new SchemaTableName(tableMetadata.getKeyspace().getName(), tableMetadata.getName());
        if (!isMaterializedView(schemaTableName)) {
            TableMetadata table = (TableMetadata) tableMetadata;
            for (IndexMetadata idx : table.getIndexes()) {
                if (idx.getTarget().equals(columnMeta.getName())) {
                    indexed = true;
                    break;
                }
            }
        }
        return new CassandraColumnHandle(connectorId, columnMeta.getName(), ordinalPosition, cassandraType, typeArguments, partitionKey, clusteringKey, indexed, hidden);
    }

    /**
     * Enumerates the partitions matching the given per-column filter value sets.
     * Returns UNPARTITIONED when the prefixes don't cover every partition-key
     * column or when the key query yields no rows object. For each result row
     * it serializes the raw partition key bytes (length-prefixed components with
     * a trailing 0 byte when the key is composite) and a CQL predicate string
     * used as the partition id; duplicate ids are collapsed.
     */
    @Override
    public List<CassandraPartition> getPartitions(CassandraTable table, List<Set<Object>> filterPrefixes)
    {
        List<CassandraColumnHandle> partitionKeyColumns = table.getPartitionKeyColumns();

        if (filterPrefixes.size() != partitionKeyColumns.size()) {
            return ImmutableList.of(CassandraPartition.UNPARTITIONED);
        }

        Iterable<Row> rows;
        if (getCassandraVersion().compareTo(PARTITION_FETCH_WITH_IN_PREDICATE_VERSION) > 0) {
            log.debug("Using IN predicate to fetch partitions.");
            rows = queryPartitionKeysWithInClauses(table, filterPrefixes);
        }
        else {
            log.debug("Using combination of partition values to fetch partitions.");
            rows = queryPartitionKeysLegacyWithMultipleQueries(table, filterPrefixes);
        }

        if (rows == null) {
            // just split the whole partition range
            return ImmutableList.of(CassandraPartition.UNPARTITIONED);
        }

        // NOTE(review): fixed 1000-byte scratch buffer — assumes serialized
        // partition keys never exceed it; would overflow on very large keys.
        ByteBuffer buffer = ByteBuffer.allocate(1000);
        HashMap<ColumnHandle, NullableValue> map = new HashMap<>();
        Set<String> uniquePartitionIds = new HashSet<>();
        StringBuilder stringBuilder = new StringBuilder();

        boolean isComposite = partitionKeyColumns.size() > 1;

        ImmutableList.Builder<CassandraPartition> partitions = ImmutableList.builder();
        for (Row row : rows) {
            buffer.clear();
            map.clear();
            stringBuilder.setLength(0);
            for (int i = 0; i < partitionKeyColumns.size(); i++) {
                ByteBuffer component = row.getBytesUnsafe(i);
                if (isComposite) {
                    // build composite key
                    short len = (short) component.limit();
                    buffer.putShort(len);
                    buffer.put(component);
                    buffer.put((byte) 0);
                }
                else {
                    buffer.put(component);
                }
                CassandraColumnHandle columnHandle = partitionKeyColumns.get(i);
                NullableValue keyPart = CassandraType.getColumnValueForPartitionKey(row, i, columnHandle.getCassandraType(), columnHandle.getTypeArguments());
                map.put(columnHandle, keyPart);
                if (i > 0) {
                    stringBuilder.append(" AND ");
                }
                stringBuilder.append(CassandraCqlUtils.validColumnName(columnHandle.getName()));
                stringBuilder.append(" = ");
                stringBuilder.append(CassandraType.getColumnValueForCql(row, i, columnHandle.getCassandraType()));
            }
            buffer.flip();
            byte[] key = new byte[buffer.limit()];
            buffer.get(key);
            TupleDomain<ColumnHandle> tupleDomain = TupleDomain.fromFixedValues(map);
            String partitionId = stringBuilder.toString();
            if (uniquePartitionIds.add(partitionId)) {
                partitions.add(new CassandraPartition(key, partitionId, tupleDomain, false));
            }
        }
        return partitions.build();
    }

    @Override
    public ResultSet execute(String cql, Object... values)
    {
        return executeWithSession(session -> session.execute(cql, values));
    }

    @Override
    public PreparedStatement prepare(RegularStatement statement)
    {
        return executeWithSession(session -> session.prepare(statement));
    }

    @Override
    public ResultSet execute(Statement statement)
    {
        return executeWithSession(session -> session.execute(statement));
    }

    /** One SELECT DISTINCT with IN clauses covering all filter values per column. */
    private Iterable<Row> queryPartitionKeysWithInClauses(CassandraTable table, List<Set<Object>> filterPrefixes)
    {
        CassandraTableHandle tableHandle = table.getTableHandle();
        List<CassandraColumnHandle> partitionKeyColumns = table.getPartitionKeyColumns();

        Select partitionKeys = CassandraCqlUtils.selectDistinctFrom(tableHandle, partitionKeyColumns);
        addWhereInClauses(partitionKeys.where(), partitionKeyColumns, filterPrefixes);

        return execute(partitionKeys).all();
    }

    /**
     * Pre-2.2 fallback: one SELECT DISTINCT per element of the cartesian
     * product of the filter value sets (equality predicates only).
     */
    private Iterable<Row> queryPartitionKeysLegacyWithMultipleQueries(CassandraTable table, List<Set<Object>> filterPrefixes)
    {
        CassandraTableHandle tableHandle = table.getTableHandle();
        List<CassandraColumnHandle> partitionKeyColumns = table.getPartitionKeyColumns();

        Set<List<Object>> filterCombinations = Sets.cartesianProduct(filterPrefixes);

        ImmutableList.Builder<Row> rowList = ImmutableList.builder();
        for (List<Object> combination : filterCombinations) {
            Select partitionKeys = CassandraCqlUtils.selectDistinctFrom(tableHandle, partitionKeyColumns);
            addWhereClause(partitionKeys.where(), partitionKeyColumns, combination);

            List<Row> resultRows = execute(partitionKeys).all();
            if (resultRows != null && !resultRows.isEmpty()) {
                rowList.addAll(resultRows);
            }
        }
        return rowList.build();
    }

    /** Appends {@code col IN (values...)} for each filtered partition-key column. */
    private static void addWhereInClauses(Where where, List<CassandraColumnHandle> partitionKeyColumns, List<Set<Object>> filterPrefixes)
    {
        for (int i = 0; i < filterPrefixes.size(); i++) {
            CassandraColumnHandle column = partitionKeyColumns.get(i);
            List<Object> values = filterPrefixes.get(i)
                    .stream()
                    .map(value -> column.getCassandraType().getJavaValue(value))
                    .collect(toList());
            Clause clause = QueryBuilder.in(CassandraCqlUtils.validColumnName(column.getName()), values);
            where.and(clause);
        }
    }

    /** Appends {@code col = value} for each element of one filter combination. */
    private static void addWhereClause(Where where, List<CassandraColumnHandle> partitionKeyColumns, List<Object> filterPrefix)
    {
        for (int i = 0; i < filterPrefix.size(); i++) {
            CassandraColumnHandle column = partitionKeyColumns.get(i);
            Object value = column.getCassandraType().getJavaValue(filterPrefix.get(i));
            Clause clause = QueryBuilder.eq(CassandraCqlUtils.validColumnName(column.getName()), value);
            where.and(clause);
        }
    }

    /**
     * Reads token-range size estimates for a table from
     * {@code system.size_estimates} (available since Cassandra 2.1.5).
     */
    @Override
    public List<SizeEstimate> getSizeEstimates(String keyspaceName, String tableName)
    {
        checkSizeEstimatesTableExist();
        Statement statement = select("range_start", "range_end", "mean_partition_size", "partitions_count")
                .from(SYSTEM, SIZE_ESTIMATES)
                .where(eq("keyspace_name", keyspaceName))
                .and(eq("table_name", tableName));

        ResultSet result = executeWithSession(session -> session.execute(statement));
        ImmutableList.Builder<SizeEstimate> estimates = ImmutableList.builder();
        for (Row row : result.all()) {
            SizeEstimate estimate = new SizeEstimate(
                    row.getString("range_start"),
                    row.getString("range_end"),
                    row.getLong("mean_partition_size"),
                    row.getLong("partitions_count"));
            estimates.add(estimate);
        }
        return estimates.build();
    }

    /** Fails with NOT_SUPPORTED when {@code system.size_estimates} is absent. */
    private void checkSizeEstimatesTableExist()
    {
        KeyspaceMetadata keyspaceMetadata = executeWithSession(session -> session.getCluster().getMetadata().getKeyspace(SYSTEM));
        checkState(keyspaceMetadata != null, "system keyspace metadata must not be null");
        TableMetadata table = keyspaceMetadata.getTable(SIZE_ESTIMATES);
        if (table == null) {
            throw new PrestoException(NOT_SUPPORTED, "Cassandra versions prior to 2.1.5 are not supported");
        }
    }

    /**
     * Runs the callable against the (lazily connected) session, retrying on
     * {@link NoHostAvailableException} with delays from the cluster's
     * reconnection-policy schedule, capped by the remaining retry budget.
     * Re-interrupts and rethrows if the sleep is interrupted.
     */
    private <T> T executeWithSession(SessionCallable<T> sessionCallable)
    {
        ReconnectionPolicy reconnectionPolicy = cluster.getConfiguration().getPolicies().getReconnectionPolicy();
        ReconnectionSchedule schedule = reconnectionPolicy.newSchedule();
        long deadline = System.currentTimeMillis() + noHostAvailableRetryTimeout.toMillis();
        while (true) {
            try {
                return sessionCallable.executeWithSession(session.get());
            }
            catch (NoHostAvailableException e) {
                long timeLeft = deadline - System.currentTimeMillis();
                if (timeLeft <= 0) {
                    throw e;
                }
                else {
                    long delay = Math.min(schedule.nextDelayMs(), timeLeft);
                    log.warn(e.getCustomMessage(10, true, true));
                    log.warn("Reconnecting in %dms", delay);
                    try {
                        Thread.sleep(delay);
                    }
                    catch (InterruptedException interrupted) {
                        Thread.currentThread().interrupt();
                        throw new RuntimeException("interrupted", interrupted);
                    }
                }
            }
        }
    }

    /** Unit of work executed against a live driver {@link Session}. */
    private interface SessionCallable<T>
    {
        T executeWithSession(Session session);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.protocolPB; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.AclStatus; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.StripedFileTestUtil; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockCommandProto; import 
org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECRecoveryCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockRecoveryCommandProto; import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeRegistrationProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeStorageProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlockKeyProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlockWithLocationsProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.BlocksWithLocationsProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.CheckpointSignatureProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.ExportedBlockKeysProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.NamenodeRegistrationProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.NamenodeRegistrationProto.NamenodeRoleProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.NamespaceInfoProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.RecoveringBlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.RemoteEditLogManifestProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.RemoteEditLogProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsServerProtos.StorageInfoProto; import org.apache.hadoop.hdfs.security.token.block.BlockKey; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys; import 
org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.namenode.CheckpointSignature; import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager; import org.apache.hadoop.hdfs.server.protocol.BlockCommand; import org.apache.hadoop.hdfs.server.protocol.BlockECRecoveryCommand.BlockECRecoveryInfo; import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand; import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations; import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.StripedBlockWithLocations; import org.apache.hadoop.hdfs.server.protocol.BlockECRecoveryCommand; import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; import org.apache.hadoop.io.Text; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; import org.junit.Assert; import org.junit.Test; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; /** * 
Tests for {@link PBHelper}: verifies that domain objects survive a
round trip through their protobuf representations unchanged.
 */
public class TestPBHelper {

  /**
   * Used for asserting equality on doubles.
   */
  private static final double DELTA = 0.000001;

  @Test
  public void testConvertNamenodeRole() {
    assertEquals(NamenodeRoleProto.BACKUP,
        PBHelper.convert(NamenodeRole.BACKUP));
    assertEquals(NamenodeRoleProto.CHECKPOINT,
        PBHelper.convert(NamenodeRole.CHECKPOINT));
    assertEquals(NamenodeRoleProto.NAMENODE,
        PBHelper.convert(NamenodeRole.NAMENODE));
    assertEquals(NamenodeRole.BACKUP,
        PBHelper.convert(NamenodeRoleProto.BACKUP));
    assertEquals(NamenodeRole.CHECKPOINT,
        PBHelper.convert(NamenodeRoleProto.CHECKPOINT));
    assertEquals(NamenodeRole.NAMENODE,
        PBHelper.convert(NamenodeRoleProto.NAMENODE));
  }

  /** Builds a small, fully populated StorageInfo for round-trip tests. */
  private static StorageInfo getStorageInfo(NodeType type) {
    return new StorageInfo(1, 2, "cid", 3, type);
  }

  @Test
  public void testConvertStoragInfo() {
    StorageInfo info = getStorageInfo(NodeType.NAME_NODE);
    StorageInfoProto infoProto = PBHelper.convert(info);
    StorageInfo info2 = PBHelper.convert(infoProto, NodeType.NAME_NODE);
    assertEquals(info.getClusterID(), info2.getClusterID());
    assertEquals(info.getCTime(), info2.getCTime());
    assertEquals(info.getLayoutVersion(), info2.getLayoutVersion());
    assertEquals(info.getNamespaceID(), info2.getNamespaceID());
  }

  @Test
  public void testConvertNamenodeRegistration() {
    StorageInfo info = getStorageInfo(NodeType.NAME_NODE);
    NamenodeRegistration reg = new NamenodeRegistration("address:999",
        "http:1000", info, NamenodeRole.NAMENODE);
    NamenodeRegistrationProto regProto = PBHelper.convert(reg);
    NamenodeRegistration reg2 = PBHelper.convert(regProto);
    assertEquals(reg.getAddress(), reg2.getAddress());
    assertEquals(reg.getClusterID(), reg2.getClusterID());
    assertEquals(reg.getCTime(), reg2.getCTime());
    assertEquals(reg.getHttpAddress(), reg2.getHttpAddress());
    assertEquals(reg.getLayoutVersion(), reg2.getLayoutVersion());
    assertEquals(reg.getNamespaceID(), reg2.getNamespaceID());
    assertEquals(reg.getRegistrationID(), reg2.getRegistrationID());
    assertEquals(reg.getRole(), reg2.getRole());
    assertEquals(reg.getVersion(), reg2.getVersion());
  }

  @Test
  public void testConvertDatanodeID() {
    DatanodeID dn = DFSTestUtil.getLocalDatanodeID();
    DatanodeIDProto dnProto = PBHelperClient.convert(dn);
    DatanodeID dn2 = PBHelperClient.convert(dnProto);
    compare(dn, dn2);
  }

  void compare(DatanodeID dn, DatanodeID dn2) {
    assertEquals(dn.getIpAddr(), dn2.getIpAddr());
    assertEquals(dn.getHostName(), dn2.getHostName());
    assertEquals(dn.getDatanodeUuid(), dn2.getDatanodeUuid());
    assertEquals(dn.getXferPort(), dn2.getXferPort());
    assertEquals(dn.getInfoPort(), dn2.getInfoPort());
    assertEquals(dn.getIpcPort(), dn2.getIpcPort());
  }

  void compare(DatanodeStorage dns1, DatanodeStorage dns2) {
    assertThat(dns2.getStorageID(), is(dns1.getStorageID()));
    assertThat(dns2.getState(), is(dns1.getState()));
    assertThat(dns2.getStorageType(), is(dns1.getStorageType()));
  }

  @Test
  public void testConvertBlock() {
    Block b = new Block(1, 100, 3);
    BlockProto bProto = PBHelperClient.convert(b);
    Block b2 = PBHelperClient.convert(bProto);
    assertEquals(b, b2);
  }

  /**
   * Builds a BlockWithLocations (optionally striped) with three replicas
   * for conversion round-trip tests.
   */
  private static BlockWithLocations getBlockWithLocations(
      int bid, boolean isStriped) {
    final String[] datanodeUuids = {"dn1", "dn2", "dn3"};
    final String[] storageIDs = {"s1", "s2", "s3"};
    final StorageType[] storageTypes = {
        StorageType.DISK, StorageType.DISK, StorageType.DISK};
    final byte[] indices = {0, 1, 2};
    final short dataBlkNum = 6;
    BlockWithLocations blkLocs = new BlockWithLocations(new Block(bid, 0, 1),
        datanodeUuids, storageIDs, storageTypes);
    if (isStriped) {
      blkLocs = new StripedBlockWithLocations(blkLocs, indices, dataBlkNum,
          StripedFileTestUtil.BLOCK_STRIPED_CELL_SIZE);
    }
    return blkLocs;
  }

  private void compare(BlockWithLocations locs1, BlockWithLocations locs2) {
    assertEquals(locs1.getBlock(), locs2.getBlock());
    assertTrue(Arrays.equals(locs1.getStorageIDs(), locs2.getStorageIDs()));
    if (locs1 instanceof StripedBlockWithLocations) {
      assertTrue(Arrays.equals(((StripedBlockWithLocations) locs1).getIndices(),
          ((StripedBlockWithLocations) locs2).getIndices()));
    }
  }

  @Test
  public void testConvertBlockWithLocations() {
    boolean[] testSuite = new boolean[]{false, true};
    for (int i = 0; i < testSuite.length; i++) {
      BlockWithLocations locs = getBlockWithLocations(1, testSuite[i]);
      BlockWithLocationsProto locsProto = PBHelper.convert(locs);
      BlockWithLocations locs2 = PBHelper.convert(locsProto);
      compare(locs, locs2);
    }
  }

  @Test
  public void testConvertBlocksWithLocations() {
    boolean[] testSuite = new boolean[]{false, true};
    for (int i = 0; i < testSuite.length; i++) {
      BlockWithLocations[] list = new BlockWithLocations[]{
          getBlockWithLocations(1, testSuite[i]),
          getBlockWithLocations(2, testSuite[i])};
      BlocksWithLocations locs = new BlocksWithLocations(list);
      BlocksWithLocationsProto locsProto = PBHelper.convert(locs);
      BlocksWithLocations locs2 = PBHelper.convert(locsProto);
      BlockWithLocations[] blocks = locs.getBlocks();
      BlockWithLocations[] blocks2 = locs2.getBlocks();
      assertEquals(blocks.length, blocks2.length);
      for (int j = 0; j < blocks.length; j++) {
        compare(blocks[j], blocks2[j]);
      }
    }
  }

  private static BlockKey getBlockKey(int keyId) {
    return new BlockKey(keyId, 10, "encodedKey".getBytes());
  }

  private void compare(BlockKey k1, BlockKey k2) {
    assertEquals(k1.getExpiryDate(), k2.getExpiryDate());
    assertEquals(k1.getKeyId(), k2.getKeyId());
    assertTrue(Arrays.equals(k1.getEncodedKey(), k2.getEncodedKey()));
  }

  @Test
  public void testConvertBlockKey() {
    BlockKey key = getBlockKey(1);
    BlockKeyProto keyProto = PBHelper.convert(key);
    BlockKey key1 = PBHelper.convert(keyProto);
    compare(key, key1);
  }

  @Test
  public void testConvertExportedBlockKeys() {
    BlockKey[] keys = new BlockKey[] { getBlockKey(2), getBlockKey(3) };
    ExportedBlockKeys expKeys = new ExportedBlockKeys(true, 9, 10,
        getBlockKey(1), keys);
    ExportedBlockKeysProto expKeysProto = PBHelper.convert(expKeys);
    ExportedBlockKeys expKeys1 = PBHelper.convert(expKeysProto);
    compare(expKeys, expKeys1);
  }

  void compare(ExportedBlockKeys expKeys, ExportedBlockKeys expKeys1) {
    BlockKey[] allKeys = expKeys.getAllKeys();
    BlockKey[] allKeys1 = expKeys1.getAllKeys();
    assertEquals(allKeys.length, allKeys1.length);
    for (int i = 0; i < allKeys.length; i++) {
      compare(allKeys[i], allKeys1[i]);
    }
    compare(expKeys.getCurrentKey(), expKeys1.getCurrentKey());
    assertEquals(expKeys.getKeyUpdateInterval(),
        expKeys1.getKeyUpdateInterval());
    assertEquals(expKeys.getTokenLifetime(), expKeys1.getTokenLifetime());
  }

  @Test
  public void testConvertCheckpointSignature() {
    CheckpointSignature s = new CheckpointSignature(
        getStorageInfo(NodeType.NAME_NODE), "bpid", 100, 1);
    CheckpointSignatureProto sProto = PBHelper.convert(s);
    CheckpointSignature s1 = PBHelper.convert(sProto);
    assertEquals(s.getBlockpoolID(), s1.getBlockpoolID());
    assertEquals(s.getClusterID(), s1.getClusterID());
    assertEquals(s.getCTime(), s1.getCTime());
    assertEquals(s.getCurSegmentTxId(), s1.getCurSegmentTxId());
    assertEquals(s.getLayoutVersion(), s1.getLayoutVersion());
    assertEquals(s.getMostRecentCheckpointTxId(),
        s1.getMostRecentCheckpointTxId());
    assertEquals(s.getNamespaceID(), s1.getNamespaceID());
  }

  private static void compare(RemoteEditLog l1, RemoteEditLog l2) {
    assertEquals(l1.getEndTxId(), l2.getEndTxId());
    assertEquals(l1.getStartTxId(), l2.getStartTxId());
  }

  @Test
  public void testConvertRemoteEditLog() {
    RemoteEditLog l = new RemoteEditLog(1, 100);
    RemoteEditLogProto lProto = PBHelper.convert(l);
    RemoteEditLog l1 = PBHelper.convert(lProto);
    compare(l, l1);
  }

  @Test
  public void testConvertRemoteEditLogManifest() {
    List<RemoteEditLog> logs = new ArrayList<RemoteEditLog>();
    logs.add(new RemoteEditLog(1, 10));
    logs.add(new RemoteEditLog(11, 20));
    RemoteEditLogManifest m = new RemoteEditLogManifest(logs);
    RemoteEditLogManifestProto mProto = PBHelper.convert(m);
    RemoteEditLogManifest m1 = PBHelper.convert(mProto);
    List<RemoteEditLog> logs1 = m1.getLogs();
    assertEquals(logs.size(), logs1.size());
    for (int i = 0; i < logs.size(); i++) {
      compare(logs.get(i), logs1.get(i));
    }
  }

  public ExtendedBlock getExtendedBlock() {
    return getExtendedBlock(1);
  }

  public ExtendedBlock getExtendedBlock(long blkid) {
    return new ExtendedBlock("bpid", blkid, 100, 2);
  }

  private void compare(DatanodeInfo dn1, DatanodeInfo dn2) {
    assertEquals(dn1.getAdminState(), dn2.getAdminState());
    assertEquals(dn1.getBlockPoolUsed(), dn2.getBlockPoolUsed());
    assertEquals(dn1.getBlockPoolUsedPercent(),
        dn2.getBlockPoolUsedPercent(), DELTA);
    assertEquals(dn1.getCapacity(), dn2.getCapacity());
    assertEquals(dn1.getDatanodeReport(), dn2.getDatanodeReport());
    // BUG FIX: these two previously compared dn1 against itself, so they
    // could never fail regardless of what the conversion produced.
    assertEquals(dn1.getDfsUsed(), dn2.getDfsUsed());
    assertEquals(dn1.getDfsUsedPercent(), dn2.getDfsUsedPercent(), DELTA);
    assertEquals(dn1.getIpAddr(), dn2.getIpAddr());
    assertEquals(dn1.getHostName(), dn2.getHostName());
    assertEquals(dn1.getInfoPort(), dn2.getInfoPort());
    assertEquals(dn1.getIpcPort(), dn2.getIpcPort());
    assertEquals(dn1.getLastUpdate(), dn2.getLastUpdate());
    assertEquals(dn1.getLevel(), dn2.getLevel());
    assertEquals(dn1.getNetworkLocation(), dn2.getNetworkLocation());
  }

  @Test
  public void testConvertExtendedBlock() {
    ExtendedBlock b = getExtendedBlock();
    ExtendedBlockProto bProto = PBHelperClient.convert(b);
    ExtendedBlock b1 = PBHelperClient.convert(bProto);
    assertEquals(b, b1);

    // Negative block IDs must also survive the round trip.
    b.setBlockId(-1);
    bProto = PBHelperClient.convert(b);
    b1 = PBHelperClient.convert(bProto);
    assertEquals(b, b1);
  }

  @Test
  public void testConvertRecoveringBlock() {
    DatanodeInfo di1 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo di2 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 };
    RecoveringBlock b = new RecoveringBlock(getExtendedBlock(), dnInfo, 3);
    RecoveringBlockProto bProto = PBHelper.convert(b);
    RecoveringBlock b1 = PBHelper.convert(bProto);
    assertEquals(b.getBlock(), b1.getBlock());
    DatanodeInfo[] dnInfo1 = b1.getLocations();
    assertEquals(dnInfo.length, dnInfo1.length);
    for (int i = 0; i < dnInfo.length; i++) {
      // BUG FIX: the loop previously compared element 0 on every
      // iteration, leaving all other locations unverified.
      compare(dnInfo[i], dnInfo1[i]);
    }
  }

  @Test
  public void testConvertBlockRecoveryCommand() {
    DatanodeInfo di1 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo di2 = DFSTestUtil.getLocalDatanodeInfo();
    DatanodeInfo[] dnInfo = new DatanodeInfo[] { di1, di2 };

    List<RecoveringBlock> blks = ImmutableList.of(
        new RecoveringBlock(getExtendedBlock(1), dnInfo, 3),
        new RecoveringBlock(getExtendedBlock(2), dnInfo, 3)
    );

    BlockRecoveryCommand cmd = new BlockRecoveryCommand(blks);
    BlockRecoveryCommandProto proto = PBHelper.convert(cmd);
    assertEquals(1, proto.getBlocks(0).getBlock().getB().getBlockId());
    assertEquals(2, proto.getBlocks(1).getBlock().getB().getBlockId());

    BlockRecoveryCommand cmd2 = PBHelper.convert(proto);
    List<RecoveringBlock> cmd2Blks = Lists.newArrayList(
        cmd2.getRecoveringBlocks());
    assertEquals(blks.get(0).getBlock(), cmd2Blks.get(0).getBlock());
    assertEquals(blks.get(1).getBlock(), cmd2Blks.get(1).getBlock());
    assertEquals(Joiner.on(",").join(blks), Joiner.on(",").join(cmd2Blks));
    assertEquals(cmd.toString(), cmd2.toString());
  }

  @Test
  public void testConvertText() {
    Text t = new Text("abc".getBytes());
    String s = t.toString();
    Text t1 = new Text(s);
    assertEquals(t, t1);
  }

  @Test
  public void testConvertBlockToken() {
    Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(), new Text("kind"),
        new Text("service"));
    TokenProto tokenProto = PBHelperClient.convert(token);
    Token<BlockTokenIdentifier> token2 = PBHelperClient.convert(tokenProto);
    compare(token, token2);
  }

  @Test
  public void testConvertNamespaceInfo() {
    NamespaceInfo info = new NamespaceInfo(37, "clusterID", "bpID", 2300);
    NamespaceInfoProto proto = PBHelper.convert(info);
    NamespaceInfo info2 = PBHelper.convert(proto);
    compare(info, info2); // compare the StorageInfo
    assertEquals(info.getBlockPoolID(), info2.getBlockPoolID());
    assertEquals(info.getBuildVersion(), info2.getBuildVersion());
  }

  private void compare(StorageInfo expected, StorageInfo actual) {
    assertEquals(expected.clusterID, actual.clusterID);
    assertEquals(expected.namespaceID, actual.namespaceID);
    assertEquals(expected.cTime, actual.cTime);
    assertEquals(expected.layoutVersion, actual.layoutVersion);
  }

  private void compare(Token<BlockTokenIdentifier> expected,
      Token<BlockTokenIdentifier> actual) {
    assertTrue(Arrays.equals(expected.getIdentifier(), actual.getIdentifier()));
    assertTrue(Arrays.equals(expected.getPassword(), actual.getPassword()));
    assertEquals(expected.getKind(), actual.getKind());
    assertEquals(expected.getService(), actual.getService());
  }

  private void compare(LocatedBlock expected, LocatedBlock actual) {
    assertEquals(expected.getBlock(), actual.getBlock());
    compare(expected.getBlockToken(), actual.getBlockToken());
    assertEquals(expected.getStartOffset(), actual.getStartOffset());
    assertEquals(expected.isCorrupt(), actual.isCorrupt());
    DatanodeInfo [] ei = expected.getLocations();
    DatanodeInfo [] ai = actual.getLocations();
    assertEquals(ei.length, ai.length);
    for (int i = 0; i < ei.length ; i++) {
      compare(ei[i], ai[i]);
    }
  }

  /** Builds a LocatedBlock with four replicas and mixed storage media. */
  private LocatedBlock createLocatedBlock() {
    DatanodeInfo[] dnInfos = {
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h1",
            AdminStates.DECOMMISSION_INPROGRESS),
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2",
            AdminStates.DECOMMISSIONED),
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3",
            AdminStates.NORMAL),
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h4",
            AdminStates.NORMAL),
    };
    String[] storageIDs = {"s1", "s2", "s3", "s4"};
    StorageType[] media = {
        StorageType.DISK,
        StorageType.SSD,
        StorageType.DISK,
        StorageType.RAM_DISK
    };
    LocatedBlock lb = new LocatedBlock(
        new ExtendedBlock("bp12", 12345, 10, 53),
        dnInfos, storageIDs, media, 5, false, new DatanodeInfo[]{});
    lb.setBlockToken(new Token<BlockTokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(), new Text("kind"),
        new Text("service")));
    return lb;
  }

  /** Builds a LocatedBlock without storage-media information. */
  private LocatedBlock createLocatedBlockNoStorageMedia() {
    DatanodeInfo[] dnInfos = {
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h1",
            AdminStates.DECOMMISSION_INPROGRESS),
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2",
            AdminStates.DECOMMISSIONED),
        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3",
            AdminStates.NORMAL)
    };
    LocatedBlock lb = new LocatedBlock(
        new ExtendedBlock("bp12", 12345, 10, 53), dnInfos);
    lb.setBlockToken(new Token<BlockTokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(), new Text("kind"),
        new Text("service")));
    lb.setStartOffset(5);
    return lb;
  }

  @Test
  public void testConvertLocatedBlock() {
    LocatedBlock lb = createLocatedBlock();
    LocatedBlockProto lbProto = PBHelperClient.convertLocatedBlock(lb);
    LocatedBlock lb2 = PBHelperClient.convertLocatedBlockProto(lbProto);
    compare(lb, lb2);
  }

  @Test
  public void testConvertLocatedBlockNoStorageMedia() {
    LocatedBlock lb = createLocatedBlockNoStorageMedia();
    LocatedBlockProto lbProto = PBHelperClient.convertLocatedBlock(lb);
    LocatedBlock lb2 = PBHelperClient.convertLocatedBlockProto(lbProto);
    compare(lb, lb2);
  }

  @Test
  public void testConvertLocatedBlockList() {
    ArrayList<LocatedBlock> lbl = new ArrayList<LocatedBlock>();
    for (int i = 0; i < 3; i++) {
      lbl.add(createLocatedBlock());
    }
    List<LocatedBlockProto> lbpl = PBHelperClient.convertLocatedBlocks2(lbl);
    List<LocatedBlock> lbl2 = PBHelperClient.convertLocatedBlocks(lbpl);
    assertEquals(lbl.size(), lbl2.size());
    for (int i = 0; i < lbl.size(); i++) {
      // BUG FIX: the second index was hard-coded to 2, so every element
      // of lbl was compared against lbl2.get(2) instead of its peer.
      compare(lbl.get(i), lbl2.get(i));
    }
  }

  @Test
  public void testConvertLocatedBlockArray() {
    LocatedBlock [] lbl = new LocatedBlock[3];
    for (int i = 0; i < 3; i++) {
      lbl[i] = createLocatedBlock();
    }
    LocatedBlockProto [] lbpl = PBHelperClient.convertLocatedBlocks(lbl);
    LocatedBlock [] lbl2 = PBHelperClient.convertLocatedBlocks(lbpl);
    assertEquals(lbl.length, lbl2.length);
    for (int i = 0; i < lbl.length; i++) {
      compare(lbl[i], lbl2[i]);
    }
  }

  @Test
  public void testConvertDatanodeRegistration() {
    DatanodeID dnId = DFSTestUtil.getLocalDatanodeID();
    BlockKey[] keys = new BlockKey[] { getBlockKey(2), getBlockKey(3) };
    ExportedBlockKeys expKeys = new ExportedBlockKeys(true, 9, 10,
        getBlockKey(1), keys);
    DatanodeRegistration reg = new DatanodeRegistration(dnId,
        new StorageInfo(NodeType.DATA_NODE), expKeys, "3.0.0");
    DatanodeRegistrationProto proto = PBHelper.convert(reg);
    DatanodeRegistration reg2 = PBHelper.convert(proto);
    compare(reg.getStorageInfo(), reg2.getStorageInfo());
    compare(reg.getExportedKeys(), reg2.getExportedKeys());
    compare(reg, reg2);
    assertEquals(reg.getSoftwareVersion(), reg2.getSoftwareVersion());
  }

  @Test
  public void TestConvertDatanodeStorage() {
    DatanodeStorage dns1 = new DatanodeStorage(
        "id1", DatanodeStorage.State.NORMAL, StorageType.SSD);
    DatanodeStorageProto proto = PBHelperClient.convert(dns1);
    DatanodeStorage dns2 = PBHelperClient.convert(proto);
    compare(dns1, dns2);
  }

  @Test
  public void testConvertBlockCommand() {
    Block[] blocks = new Block[] { new Block(21), new Block(22) };
    DatanodeInfo[][] dnInfos = new DatanodeInfo[][] { new DatanodeInfo[1],
        new DatanodeInfo[2] };
    dnInfos[0][0] = DFSTestUtil.getLocalDatanodeInfo();
    dnInfos[1][0] = DFSTestUtil.getLocalDatanodeInfo();
    dnInfos[1][1] = DFSTestUtil.getLocalDatanodeInfo();
    String[][] storageIDs = {{"s00"}, {"s10", "s11"}};
    StorageType[][] storageTypes = {{StorageType.DEFAULT},
        {StorageType.DEFAULT, StorageType.DEFAULT}};
    BlockCommand bc = new BlockCommand(DatanodeProtocol.DNA_TRANSFER, "bp1",
        blocks, dnInfos, storageTypes, storageIDs);
    BlockCommandProto bcProto = PBHelper.convert(bc);
    BlockCommand bc2 = PBHelper.convert(bcProto);
    assertEquals(bc.getAction(), bc2.getAction());
    assertEquals(bc.getBlocks().length, bc2.getBlocks().length);
    Block[] blocks2 = bc2.getBlocks();
    for (int i = 0; i < blocks.length; i++) {
      assertEquals(blocks[i], blocks2[i]);
    }
    DatanodeInfo[][] dnInfos2 = bc2.getTargets();
    assertEquals(dnInfos.length, dnInfos2.length);
    for (int i = 0; i < dnInfos.length; i++) {
      DatanodeInfo[] d1 = dnInfos[i];
      DatanodeInfo[] d2 = dnInfos2[i];
      assertEquals(d1.length, d2.length);
      for (int j = 0; j < d1.length; j++) {
        compare(d1[j], d2[j]);
      }
    }
  }

  @Test
  public void testChecksumTypeProto() {
    assertEquals(DataChecksum.Type.NULL,
        PBHelperClient.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL));
    assertEquals(DataChecksum.Type.CRC32,
        PBHelperClient.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32));
    assertEquals(DataChecksum.Type.CRC32C,
        PBHelperClient.convert(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C));
    assertEquals(PBHelperClient.convert(DataChecksum.Type.NULL),
        HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL);
    assertEquals(PBHelperClient.convert(DataChecksum.Type.CRC32),
        HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32);
    assertEquals(PBHelperClient.convert(DataChecksum.Type.CRC32C),
        HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
  }

  @Test
  public void testAclEntryProto() {
    // All fields populated.
    AclEntry e1 = new AclEntry.Builder().setName("test")
        .setPermission(FsAction.READ_EXECUTE).setScope(AclEntryScope.DEFAULT)
        .setType(AclEntryType.OTHER).build();
    // No name.
    AclEntry e2 = new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
        .setType(AclEntryType.USER).setPermission(FsAction.ALL).build();
    // No permission, which will default to the 0'th enum element.
    AclEntry e3 = new AclEntry.Builder().setScope(AclEntryScope.ACCESS)
        .setType(AclEntryType.USER).setName("test").build();
    AclEntry[] expected = new AclEntry[] { e1, e2,
        new AclEntry.Builder()
            .setScope(e3.getScope())
            .setType(e3.getType())
            .setName(e3.getName())
            .setPermission(FsAction.NONE)
            .build() };
    AclEntry[] actual = Lists.newArrayList(
        PBHelperClient.convertAclEntry(PBHelperClient.convertAclEntryProto(Lists
            .newArrayList(e1, e2, e3)))).toArray(new AclEntry[0]);
    Assert.assertArrayEquals(expected, actual);
  }

  @Test
  public void testAclStatusProto() {
    AclEntry e = new AclEntry.Builder().setName("test")
        .setPermission(FsAction.READ_EXECUTE).setScope(AclEntryScope.DEFAULT)
        .setType(AclEntryType.OTHER).build();
    AclStatus s = new AclStatus.Builder().owner("foo").group("bar").addEntry(e)
        .build();
    Assert.assertEquals(s, PBHelperClient.convert(PBHelperClient.convert(s)));
  }

  @Test
  public void testBlockECRecoveryCommand() {
    DatanodeInfo[] dnInfos0 = new DatanodeInfo[] {
        DFSTestUtil.getLocalDatanodeInfo(), DFSTestUtil.getLocalDatanodeInfo() };
    DatanodeStorageInfo targetDnInfos_0 = BlockManagerTestUtil
        .newDatanodeStorageInfo(DFSTestUtil.getLocalDatanodeDescriptor(),
            new DatanodeStorage("s00"));
    DatanodeStorageInfo targetDnInfos_1 = BlockManagerTestUtil
        .newDatanodeStorageInfo(DFSTestUtil.getLocalDatanodeDescriptor(),
            new DatanodeStorage("s01"));
    DatanodeStorageInfo[] targetDnInfos0 = new DatanodeStorageInfo[] {
        targetDnInfos_0, targetDnInfos_1 };
    short[] liveBlkIndices0 = new short[2];
    BlockECRecoveryInfo blkECRecoveryInfo0 = new BlockECRecoveryInfo(
        new ExtendedBlock("bp1", 1234), dnInfos0, targetDnInfos0,
        liveBlkIndices0, ErasureCodingPolicyManager.getSystemDefaultPolicy());
    DatanodeInfo[] dnInfos1 = new DatanodeInfo[] {
        DFSTestUtil.getLocalDatanodeInfo(), DFSTestUtil.getLocalDatanodeInfo() };
    DatanodeStorageInfo targetDnInfos_2 = BlockManagerTestUtil
        .newDatanodeStorageInfo(DFSTestUtil.getLocalDatanodeDescriptor(),
            new DatanodeStorage("s02"));
    DatanodeStorageInfo targetDnInfos_3 = BlockManagerTestUtil
        .newDatanodeStorageInfo(DFSTestUtil.getLocalDatanodeDescriptor(),
            new DatanodeStorage("s03"));
    DatanodeStorageInfo[] targetDnInfos1 = new DatanodeStorageInfo[] {
        targetDnInfos_2, targetDnInfos_3 };
    short[] liveBlkIndices1 = new short[2];
    BlockECRecoveryInfo blkECRecoveryInfo1 = new BlockECRecoveryInfo(
        new ExtendedBlock("bp2", 3256), dnInfos1, targetDnInfos1,
        liveBlkIndices1, ErasureCodingPolicyManager.getSystemDefaultPolicy());
    List<BlockECRecoveryInfo> blkRecoveryInfosList =
        new ArrayList<BlockECRecoveryInfo>();
    blkRecoveryInfosList.add(blkECRecoveryInfo0);
    blkRecoveryInfosList.add(blkECRecoveryInfo1);
    BlockECRecoveryCommand blkECRecoveryCmd = new BlockECRecoveryCommand(
        DatanodeProtocol.DNA_ERASURE_CODING_RECOVERY, blkRecoveryInfosList);
    BlockECRecoveryCommandProto blkECRecoveryCmdProto = PBHelper
        .convert(blkECRecoveryCmd);
    blkECRecoveryCmd = PBHelper.convert(blkECRecoveryCmdProto);
    Iterator<BlockECRecoveryInfo> iterator = blkECRecoveryCmd.getECTasks()
        .iterator();
    assertBlockECRecoveryInfoEquals(blkECRecoveryInfo0, iterator.next());
    assertBlockECRecoveryInfoEquals(blkECRecoveryInfo1, iterator.next());
  }

  private void assertBlockECRecoveryInfoEquals(
      BlockECRecoveryInfo blkECRecoveryInfo1,
      BlockECRecoveryInfo blkECRecoveryInfo2) {
    assertEquals(blkECRecoveryInfo1.getExtendedBlock(),
        blkECRecoveryInfo2.getExtendedBlock());

    DatanodeInfo[] sourceDnInfos1 = blkECRecoveryInfo1.getSourceDnInfos();
    DatanodeInfo[] sourceDnInfos2 = blkECRecoveryInfo2.getSourceDnInfos();
    assertDnInfosEqual(sourceDnInfos1, sourceDnInfos2);

    DatanodeInfo[] targetDnInfos1 = blkECRecoveryInfo1.getTargetDnInfos();
    DatanodeInfo[] targetDnInfos2 = blkECRecoveryInfo2.getTargetDnInfos();
    assertDnInfosEqual(targetDnInfos1, targetDnInfos2);

    String[] targetStorageIDs1 = blkECRecoveryInfo1.getTargetStorageIDs();
    String[] targetStorageIDs2 = blkECRecoveryInfo2.getTargetStorageIDs();
    assertEquals(targetStorageIDs1.length, targetStorageIDs2.length);
    for (int i = 0; i < targetStorageIDs1.length; i++) {
      assertEquals(targetStorageIDs1[i], targetStorageIDs2[i]);
    }

    short[] liveBlockIndices1 = blkECRecoveryInfo1.getLiveBlockIndices();
    short[] liveBlockIndices2 = blkECRecoveryInfo2.getLiveBlockIndices();
    for (int i = 0; i < liveBlockIndices1.length; i++) {
      assertEquals(liveBlockIndices1[i], liveBlockIndices2[i]);
    }

    ErasureCodingPolicy ecPolicy1 = blkECRecoveryInfo1.getErasureCodingPolicy();
    ErasureCodingPolicy ecPolicy2 = blkECRecoveryInfo2.getErasureCodingPolicy();
    // Compare ECPolicies same as default ECPolicy as we used system default
    // ECPolicy used in this test
    compareECPolicies(ErasureCodingPolicyManager.getSystemDefaultPolicy(),
        ecPolicy1);
    compareECPolicies(ErasureCodingPolicyManager.getSystemDefaultPolicy(),
        ecPolicy2);
  }

  private void compareECPolicies(ErasureCodingPolicy ecPolicy1,
      ErasureCodingPolicy ecPolicy2) {
    assertEquals(ecPolicy1.getName(), ecPolicy2.getName());
    assertEquals(ecPolicy1.getNumDataUnits(), ecPolicy2.getNumDataUnits());
    assertEquals(ecPolicy1.getNumParityUnits(), ecPolicy2.getNumParityUnits());
  }

  private void assertDnInfosEqual(DatanodeInfo[] dnInfos1,
      DatanodeInfo[] dnInfos2) {
    assertEquals(dnInfos1.length, dnInfos2.length);
    for (int i = 0; i < dnInfos1.length; i++) {
      compare(dnInfos1[i], dnInfos2[i]);
    }
  }
}
package com.github.sarxos.ryzom.network; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec; import org.eclipse.jetty.websocket.api.Session; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketClose; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketConnect; import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage; import org.eclipse.jetty.websocket.api.annotations.WebSocket; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; /** * LV-20 Ryzom Network WebSocket. The messages in this class has been reverse-engineered from the * chat service traffic. * * @author Bartosz Firyn (sarxos) */ @WebSocket(maxTextMessageSize = 64 * 1024) public class Lv20Socket { /** * I'm the logger. */ private static final Logger LOG = LoggerFactory.getLogger(Lv20Socket.class); /** * I'm the traffic logger. */ private static final Logger LOG_TRAFFIC = LoggerFactory.getLogger(Lv20Socket.class.getName() + ".traffic"); private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1"; private static final String HMAC_SHA1_KEY = UUID.randomUUID().toString(); private static final ObjectMapper MAPPER = new ObjectMapper(); // chat IDs public static final String CHAT_TELL = "tell"; public static final String CHAT_UNIVERSE = "all"; public static final String CHAT_EN = "en"; public static final String CHAT_DE = "de"; public static final String CHAT_FR = "fr"; /** * Handle payload with 'o' prefix (server telling us that connection is open). 
* * @author Bartosz Firyn (sarxos) */ private final class PrefixHandlerO extends Lv20PrefixHandler { public PrefixHandlerO() { super('o'); } @Override protected void handle(String message) { ryzomConnectionOpen = true; } } /** * Handle heartbeat (just log it). * * @author Bartosz Firyn (sarxos) */ private final class PrefixHandlerH extends Lv20PrefixHandler { public PrefixHandlerH() { super('h'); } @Override protected void handle(String message) { LOG.debug("Heartbeat"); } } /** * Handle 'a' prefix (this is when server answer to the payload we've sent). * * @author Bartosz Firyn (sarxos) */ private final class PrefixHandlerA extends Lv20PrefixHandler { public PrefixHandlerA() { super('a'); } @Override protected void handle(String message) { process(message); } } /** * Process 'connected' response from server. * * @author Bartosz Firyn (sarxos) */ private final class MessageProcessorConnected extends Lv20MessageHandler { public MessageProcessorConnected() { super("connected"); } @Override protected boolean test(Map<String, Object> message) { return path(message, "session") != null; } @Override protected void handle(Map<String, Object> message) { ryzomSession = path(message, "session"); } } /** * Process 'ping' request from server. * * @author Bartosz Firyn (sarxos) */ private final class MessageProcessorPing extends Lv20MessageHandler { public MessageProcessorPing() { super("ping"); } @Override protected boolean test(Map<String, Object> message) { return true; } @Override protected void handle(Map<String, Object> message) { executor.execute(Lv20Socket.this::msgPong); } } /** * Process 'added' response from the server. This happen when user is logged in. 
* * @author Bartosz Firyn (sarxos) */ private final class MessageProcessorUsersAdded extends Lv20MessageHandler { // a["{\"msg\":\"added\",\"collection\":\"users\",\"id\":\"RN62wtdTrNjjDEFrP\",\"fields\":{\"profile\":{\"lang\":\"en\",\"email\":\"pianola@mailinator.com\",\"created_at\":1436995355},\"username\":\"Jenamessenger\"}}"] public MessageProcessorUsersAdded() { super("added"); } @Override protected boolean test(Map<String, Object> message) { return collection(message, "users") && path(message, "fields/profile/email") != null && path(message, "fields/username") != null; } @Override protected void handle(Map<String, Object> message) { ryzomUserResourceId = path(message, "id"); ryzomUserEmail = path(message, "fields/profile/email"); ryzomUserName = path(message, "fields/username"); executor.execute(Lv20Socket.this::msgSubMeteorAutoupdateClientVersions); executor.execute(Lv20Socket.this::msgSubMeteorLoginServiceConfiguration); executor.execute(Lv20Socket.this::msgSubIntercomHash); // executor.execute(Lv20Socket.this::msgSubI18n); executor.execute(Lv20Socket.this::msgSubLatestUniversChats); executor.execute(Lv20Socket.this::msgSubLatestTellChats); executor.execute(Lv20Socket.this::msgSubNextEvents); // executor.execute(Lv20Socket.this::msgSubGlobals); executor.execute(Lv20Socket.this::msgSubUserData); } } /** * Process 'changed' response from the server. This happen when user data on server is modified. 
* * @author Bartosz Firyn (sarxos) */ private final class MessageProcessorUsersChanged extends Lv20MessageHandler { // a["{\"msg\":\"changed\",\"collection\":\"users\",\"id\":\"RN62wtdTrNjjDEFrP\",\"fields\":{\"game\":{\"c60\":16977,\"cid\":11962608,\"guildId\":0,\"priv\":\"\"},\"status\":{\"lastLogin\":{\"$date\":1437165554271},\"online\":true}}}"] public MessageProcessorUsersChanged() { super("changed"); } @Override protected boolean test(Map<String, Object> message) { boolean valid = true; valid &= collection(message, "users"); valid &= path(message, "id").equals(ryzomUserResourceId); valid &= path(message, "fields/game/cid") != null; valid &= path(message, "fields/game/guildId") != null; return valid; } @Override protected void handle(Map<String, Object> message) { if (ryzomUserCid == -1 && ryzomUserGuildId == -1) { executor.execute(Lv20Socket.this::msgSubLatestGuidChats); } ryzomUserCid = path(message, "fields/game/cid"); ryzomUserGuildId = path(message, "fields/game/guildId"); } } /** * Process 'removed' response from the server. This happen when user logs out. 
* * @author Bartosz Firyn (sarxos) */ private final class MessageProcessorUsersRemoved extends Lv20MessageHandler { // a["{\"msg\":\"removed\",\"collection\":\"users\",\"id\":\"RN62wtdTrNjjDEFrP\"}"] public MessageProcessorUsersRemoved() { super("removed"); } @Override protected boolean test(Map<String, Object> message) { return collection(message, "users") && path(message, "id") != null; } @Override protected void handle(Map<String, Object> message) { if (!path(message, "id").equals(ryzomUserResourceId)) { throw new IllegalStateException("Invalid user resource ID received on logout, exoected to be " + ryzomUserResourceId); } ryzomUserName = null; ryzomUserEmail = null; ryzomUserResourceId = null; ryzomUserCid = -1; ryzomUserGuildId = -1; } } private final ExecutorService executor; private final AtomicInteger number = new AtomicInteger(0); private final List<Lv20PrefixHandler> prefixHandlers = new LinkedList<>(); private final List<Lv20MessageHandler> messageHandlers = new LinkedList<>(); private volatile Session session; private volatile boolean ryzomConnectionOpen = false; private volatile String ryzomSession; private volatile String ryzomUserName; private volatile String ryzomUserEmail; private volatile String ryzomUserResourceId; private volatile int ryzomUserGuildId = -1; private volatile int ryzomUserCid = -1; private volatile long timestamp; public Lv20Socket(ExecutorService executor) { this.executor = executor; addPrefixHandler(new PrefixHandlerO()); addPrefixHandler(new PrefixHandlerH()); addPrefixHandler(new PrefixHandlerA()); addMessageProcessor(new MessageProcessorConnected()); addMessageProcessor(new MessageProcessorPing()); addMessageProcessor(new MessageProcessorUsersAdded()); addMessageProcessor(new MessageProcessorUsersChanged()); addMessageProcessor(new MessageProcessorUsersRemoved()); } public void addPrefixHandler(Lv20PrefixHandler handler) { prefixHandlers.add(handler); } public void addMessageProcessor(Lv20MessageHandler processor) { 
messageHandlers.add(processor); } @OnWebSocketClose public void onClose(int statusCode, String reason) { LOG.debug("Ryzom WebSocket is now closed, with status '{}' and reason '{}'", statusCode, reason); session = null; } @OnWebSocketConnect public void onConnect(Session session) throws IOException { LOG.debug("Ryzom WebSocket is now connected"); LOG.trace("Session is {}", session); this.session = session; this.timestamp = System.currentTimeMillis(); } @OnWebSocketMessage public void onMessage(String message) { try { receive(message); } catch (Exception e) { LOG.error(e.getMessage(), e); throw e; } } public boolean isRyzomConnectionOpen() { return ryzomConnectionOpen; } public boolean isRyzomSessionEstablished() { return ryzomSession != null; } public String getRyzomSession() { return ryzomSession; } public String getRyzomUserName() { return ryzomUserName; } public int getRyzomUserGuildId() { return ryzomUserGuildId; } public int getRyzomUserCid() { return ryzomUserCid; } public String getRyzomUserEmail() { return ryzomUserEmail; } public boolean isRyzomUserLoggedIn() { boolean login = true; login &= ryzomUserName != null; login &= ryzomUserEmail != null; login &= ryzomUserResourceId != null; return login; } public boolean isRyzomUserLoggedOut() { boolean logout = true; logout &= ryzomUserName == null; logout &= ryzomUserEmail == null; logout &= ryzomUserResourceId == null; return logout; } private void send(Map<String, Object> map) { // null session can happen only when client is started but socket not yet connected, just // wait for the session to appear while (session == null) { LOG.trace("No session, loop awaiting"); try { Thread.sleep(10); } catch (InterruptedException e) { return; } } String message = serialize(map); LOG_TRAFFIC.trace("[-->]: {}", message); synchronized (session) { if (session.isOpen()) { try { session.getRemote().sendString(message); } catch (IOException e) { throw new IllegalStateException(e); } } else { throw new 
IllegalStateException("Ooops, the session is not open"); } } } private void receive(String payload) { Character prefix = payload.charAt(0); String message = payload.substring(1); LOG_TRAFFIC.trace("[<-{}]: {}", prefix, message); long count = prefixHandlers .stream() .filter(handler -> prefix.equals(handler.getPrefix())) .peek(handler -> handler.handle(message)) .count(); if (count == 0) { throw new IllegalStateException("Unhandled prefix " + prefix); } } private void process(String message) { Map<String, Object> map = unserialize(message); long count = messageHandlers .stream() .filter(processor -> processor.matches(map)) .peek(processor -> processor.handle(map)) .count(); if (count == 0) { LOG.debug("Message has not been processed: {}", map); } } /** * Serialize map to array of escaped JSON strings where every JSON represents map passed as the * argument. * * @param map the map * @return Message JSON string */ private static String serialize(Map<String, Object> map) { try { return MAPPER.writeValueAsString(Arrays.asList(MAPPER.writeValueAsString(map))); } catch (JsonProcessingException e) { throw new IllegalStateException(e); } } @SuppressWarnings("unchecked") private static Map<String, Object> unserialize(String message) { Map<String, Object> map = null; String msg = null; try { msg = (String) MAPPER.readValue(message, ArrayList.class).get(0); map = MAPPER.readValue(msg, HashMap.class); } catch (IOException e) { throw new IllegalStateException(e); } return map; } /** * @return Next numeric ID */ private String idnum() { return Integer.toString(number.incrementAndGet()); } /** * @return Unique ID */ private static String identity() { SecretKeySpec signingKey = new SecretKeySpec(HMAC_SHA1_KEY.getBytes(), HMAC_SHA1_ALGORITHM); try { Mac mac = Mac.getInstance(HMAC_SHA1_ALGORITHM); mac.init(signingKey); byte[] hmac = mac.doFinal(UUID.randomUUID().toString().getBytes()); return new String(Base64.getEncoder().encode(hmac), "UTF8") .substring(0, 17) .replaceAll("/", "a") 
.replaceAll("\\+", "b") .replaceAll("=", "c"); } catch (Exception e) { throw new IllegalStateException("Failed to generate HMAC", e); } } // ---------------------- messages -------------------------- // ["{\"msg\":\"connect\",\"version\":\"pre2\",\"support\":[\"pre2\",\"pre1\"]}"] protected final void msgConnect() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "connect"); map.put("version", "pre2"); map.put("support", new Object[] { "pre2", "pre1" }); send(map); } // ["{\"msg\":\"sub\",\"id\":\"9vywbKZowez7Ks6vA\",\"name\":\"meteor_autoupdate_clientVersions\",\"params\":[],\"route\":null}"] protected final void msgSubMeteorAutoupdateClientVersions() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "meteor_autoupdate_clientVersions"); map.put("params", new Object[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"JGQrEBLyFknkeK64A\",\"name\":\"meteor.loginServiceConfiguration\",\"params\":[],\"route\":null}"] protected final void msgSubMeteorLoginServiceConfiguration() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "meteor.loginServiceConfiguration"); map.put("params", new Object[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"84vcaioKamcKuK82E\",\"name\":\"intercomHash\",\"params\":[],\"route\":null}"] protected final void msgSubIntercomHash() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "intercomHash"); map.put("params", new Object[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"MPSvr8wrD8CDyZESh\",\"name\":\"i18n\",\"params\":[\"en\"],\"route\":null}"] protected final void msgSubI18n() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "i18n"); map.put("params", new Object[] { "en" }); map.put("route", 
null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"t2YqbWuvRRxQEqun7\",\"name\":\"latestUniversChats\",\"params\":[],\"route\":null}"] protected final void msgSubLatestUniversChats() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "latestUniversChats"); map.put("params", new Object[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"WLtjFM5KYC7ByatBh\",\"name\":\"latestTellChats\",\"params\":[],\"route\":null}"] protected final void msgSubLatestTellChats() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "latestTellChats"); map.put("params", new Object[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"x25XiqYmop7CNEemG\",\"name\":\"latestGuildChats\",\"params\":[0],\"route\":null}"] protected final void msgSubLatestGuidChats() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "latestGuildChats"); map.put("params", new Object[] { ryzomUserGuildId }); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"Y4n58s8S65miP6Bdg\",\"name\":\"nextEvents\",\"params\":[],\"route\":null}"] protected final void msgSubNextEvents() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "nextEvents"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"v7SwezbiAHzCNmmJJ\",\"name\":\"documentsUnlocked\",\"params\":[\"en\",null],\"route\":null}"] protected final void msgSubDocumentsUnlocked() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "documentsUnlocked"); map.put("params", new String[] { "en", null }); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"neRAZv67km2BawjqN\",\"name\":\"documents\",\"params\":[],\"route\":null}"] 
protected final void msgSubDocuments() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "documents"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"yxwv2mHqdGiKGakW9\",\"name\":\"documentsUnlocking\",\"params\":[],\"route\":null}"] protected final void msgSubDocumentsUnlocking() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "documentsUnlocking"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"FH83NmYp7fGxLZto3\",\"name\":\"globals\",\"params\":[],\"route\":null}"] protected final void msgSubGlobals() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "globals"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"NvZL5PCaxiL38gHov\",\"name\":\"ladders\",\"params\":[],\"route\":null}"] protected final void msgSubLadders() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "ladders"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"cWsHpRxdAhYCGfCX2\",\"name\":\"laddersMine\",\"params\":[],\"route\":null}"] protected final void msgSubLaddersMine() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "laddersMine"); map.put("params", new String[0]); map.put("route", null); send(map); } // ["{\"msg\":\"sub\",\"id\":\"yq8iBQWRFRT6PG2mo\",\"name\":\"userData\",\"params\":[],\"route\":null}"] protected final void msgSubUserData() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "sub"); map.put("id", identity()); map.put("name", "userData"); map.put("params", new String[0]); map.put("route", null); send(map); } // 
["{\"msg\":\"pong\"}"] protected final void msgPong() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "pong"); send(map); } // ["{\"msg\":\"method\",\"method\":\"user-status-idle\",\"params\":[1436980809850],\"id\":\"3\"}"] protected final void msgMethodUserStatusIdle() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "method"); map.put("method", "user-status-idle"); map.put("params", new long[] { timestamp }); map.put("id", idnum()); send(map); } // ["{\"msg\":\"method\",\"method\":\"user-status-active\",\"params\":[1436980809850],\"id\":\"2\"}"] protected final void msgMethodUserStatusActive() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "method"); map.put("method", "user-status-active"); map.put("params", new long[] { timestamp }); map.put("id", idnum()); send(map); } // ["{\"msg\":\"method\",\"method\":\"login\",\"params\":[{\"ryzom\":true,\"username\":\"the-user-name\",\"password\":\"the-user-passwd\",\"lang\":\"en\"}],\"id\":\"1\"}"] protected final void msgMethodLogin(String username, String password) { Map<String, Object> params = new LinkedHashMap<>(); params.put("ryzom", true); params.put("username", username); params.put("password", password); params.put("lang", "en"); Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "method"); map.put("method", "login"); map.put("params", Arrays.asList(params)); map.put("id", idnum()); send(map); } // ["{\"msg\":\"method\",\"method\":\"logout\",\"params\":[],\"id\":\"3\"}"] protected final void msgMethodLogout() { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "method"); map.put("method", "logout"); map.put("params", new Object[0]); map.put("id", idnum()); send(map); } // ["{\"msg\":\"method\",\"method\":\"chat\",\"params\":[\"all\",\"How are // you?\"],\"id\":\"15\"}"] protected final void msgMethodChat(String chat, String text) { Map<String, Object> map = new LinkedHashMap<>(); map.put("msg", "method"); map.put("method", "chat"); 
map.put("params", new String[] { chat, text }); map.put("id", idnum()); send(map); } // ["{\"msg\":\"method\",\"method\":\"chat\",\"params\":[\"tell\",\"Kopeas // bubu\"],\"id\":\"7\"}"] }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jena.arq.querybuilder;

import java.util.Collection;

import org.apache.jena.arq.querybuilder.clauses.ConstructClause;
import org.apache.jena.arq.querybuilder.clauses.DatasetClause;
import org.apache.jena.arq.querybuilder.clauses.SolutionModifierClause;
import org.apache.jena.arq.querybuilder.clauses.WhereClause;
import org.apache.jena.arq.querybuilder.handlers.ConstructHandler;
import org.apache.jena.arq.querybuilder.handlers.DatasetHandler;
import org.apache.jena.arq.querybuilder.handlers.SolutionModifierHandler;
import org.apache.jena.arq.querybuilder.handlers.WhereHandler;

import com.hp.hpl.jena.graph.FrontsTriple;
import com.hp.hpl.jena.graph.Node;
import com.hp.hpl.jena.graph.Triple;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.sparql.lang.sparql_11.ParseException;

/**
 * Builder for SPARQL CONSTRUCT queries.
 *
 * <p>Combines the dataset (FROM/FROM NAMED), WHERE, solution-modifier
 * (ORDER BY / GROUP BY / HAVING / LIMIT / OFFSET) and CONSTRUCT clauses by
 * delegating each call to the corresponding handler. Every mutator returns
 * {@code this} so calls can be chained fluently.
 */
public class ConstructBuilder extends AbstractQueryBuilder<ConstructBuilder> implements
        DatasetClause<ConstructBuilder>, WhereClause<ConstructBuilder>,
        SolutionModifierClause<ConstructBuilder>, ConstructClause<ConstructBuilder> {

    private final DatasetHandler datasetHandler;
    private final WhereHandler whereHandler;
    private final SolutionModifierHandler solutionModifier;
    private final ConstructHandler constructHandler;

    /** Creates a builder whose underlying query is of CONSTRUCT type. */
    public ConstructBuilder() {
        super();
        query.setQueryConstructType();
        datasetHandler = new DatasetHandler(query);
        whereHandler = new WhereHandler(query);
        solutionModifier = new SolutionModifierHandler(query);
        constructHandler = new ConstructHandler(query);
    }

    @Override
    public DatasetHandler getDatasetHandler() {
        return datasetHandler;
    }

    @Override
    public WhereHandler getWhereHandler() {
        return whereHandler;
    }

    @Override
    public ConstructHandler getConstructHandler() {
        return constructHandler;
    }

    @Override
    public SolutionModifierHandler getSolutionModifierHandler() {
        return solutionModifier;
    }

    /** Deep copy: every handler's state is copied into a fresh builder. */
    @Override
    public ConstructBuilder clone() {
        ConstructBuilder copy = new ConstructBuilder();
        copy.prologHandler.addAll(prologHandler);
        copy.datasetHandler.addAll(datasetHandler);
        copy.whereHandler.addAll(whereHandler);
        copy.solutionModifier.addAll(solutionModifier);
        copy.constructHandler.addAll(constructHandler);
        return copy;
    }

    @Override
    public ConstructBuilder fromNamed(String graphName) {
        datasetHandler.fromNamed(graphName);
        return this;
    }

    @Override
    public ConstructBuilder fromNamed(Collection<String> graphNames) {
        datasetHandler.fromNamed(graphNames);
        return this;
    }

    @Override
    public ConstructBuilder from(String graphName) {
        datasetHandler.from(graphName);
        return this;
    }

    @Override
    public ConstructBuilder from(Collection<String> graphName) {
        datasetHandler.from(graphName);
        return this;
    }

    @Override
    public ConstructBuilder addOrderBy(String orderBy) {
        solutionModifier.addOrderBy(orderBy);
        return this;
    }

    @Override
    public ConstructBuilder addGroupBy(String groupBy) {
        solutionModifier.addGroupBy(groupBy);
        return this;
    }

    @Override
    public ConstructBuilder addHaving(String having) throws ParseException {
        solutionModifier.addHaving(having);
        return this;
    }

    @Override
    public ConstructBuilder setLimit(int limit) {
        solutionModifier.setLimit(limit);
        return this;
    }

    @Override
    public ConstructBuilder setOffset(int offset) {
        solutionModifier.setOffset(offset);
        return this;
    }

    /**
     * Render a graph node in SPARQL-ish text form: blank nodes by label,
     * URIs in angle brackets, variables with a leading '?', literals (and
     * anything else) via {@code toString()}.
     */
    private static String toString(Node node) {
        if (node.isBlank()) {
            return node.getBlankNodeLabel();
        }
        if (node.isLiteral()) {
            return node.toString();
        }
        if (node.isURI()) {
            return String.format("<%s>", node.getURI());
        }
        if (node.isVariable()) {
            return String.format("?%s", node.getName());
        }
        return node.toString();
    }

    /**
     * Stringify an arbitrary object, unwrapping RDFNode/Node instances to
     * their SPARQL text form first.
     */
    public static String makeString(Object o) {
        if (o instanceof RDFNode) {
            return toString(((RDFNode) o).asNode());
        }
        if (o instanceof Node) {
            return toString((Node) o);
        }
        return o.toString();
    }

    @Override
    public ConstructBuilder addWhere(Triple t) {
        whereHandler.addWhere(t);
        return this;
    }

    @Override
    public ConstructBuilder addWhere(FrontsTriple t) {
        whereHandler.addWhere(t.asTriple());
        return this;
    }

    @Override
    public ConstructBuilder addWhere(Object s, Object p, Object o) {
        // Delegate to the Triple overload, which returns this.
        return addWhere(new Triple(makeNode(s), makeNode(p), makeNode(o)));
    }

    @Override
    public ConstructBuilder addOptional(Triple t) {
        whereHandler.addOptional(t);
        return this;
    }

    @Override
    public ConstructBuilder addOptional(FrontsTriple t) {
        whereHandler.addOptional(t.asTriple());
        return this;
    }

    @Override
    public ConstructBuilder addOptional(Object s, Object p, Object o) {
        return addOptional(new Triple(makeNode(s), makeNode(p), makeNode(o)));
    }

    @Override
    public ConstructBuilder addFilter(String s) throws ParseException {
        whereHandler.addFilter(s);
        return this;
    }

    @Override
    public ConstructBuilder addSubQuery(SelectBuilder subQuery) {
        // The sub-query's prologue (prefixes/base) must be merged in as well.
        prologHandler.addAll(subQuery.prologHandler);
        whereHandler.addSubQuery(subQuery);
        return this;
    }

    @Override
    public ConstructBuilder addUnion(SelectBuilder subQuery) {
        whereHandler.addUnion(subQuery);
        return this;
    }

    @Override
    public ConstructBuilder addGraph(Object graph, SelectBuilder subQuery) {
        prologHandler.addAll(subQuery.prologHandler);
        whereHandler.addGraph(makeNode(graph), subQuery.getWhereHandler());
        return this;
    }

    @Override
    public ConstructBuilder addConstruct(Triple t) {
        constructHandler.addConstruct(t);
        return this;
    }

    @Override
    public ConstructBuilder addConstruct(FrontsTriple t) {
        return addConstruct(t.asTriple());
    }

    @Override
    public ConstructBuilder addConstruct(Object s, Object p, Object o) {
        return addConstruct(new Triple(makeNode(s), makeNode(p), makeNode(o)));
    }
}
/**
 * Copyright 2011 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.SortedSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;

import com.google.common.collect.ComparisonChain;
import com.google.common.collect.Multimap;

import org.junit.experimental.categories.Category;

/**
 * Unit tests for {@code RegionSplitCalculator}: each test builds a set of
 * key ranges, computes the coverage multimap, and checks both the per-split
 * region depth and the exact textual dump of the coverage.
 */
@Category(SmallTests.class)
public class TestRegionSplitCalculator {
  final static Log LOG = LogFactory.getLog(TestRegionSplitCalculator.class);

  /**
   * This is range uses a user specified start and end keys. It also has an
   * extra time based tiebreaker so that different ranges with the same
   * start/end key pair count as different regions.
   */
  static class SimpleRange implements KeyRange {
    byte[] start, end;
    // Creation-time tiebreaker: distinguishes distinct instances that have
    // identical start/end key pairs.
    long tiebreaker;

    SimpleRange(byte[] start, byte[] end) {
      this.start = start;
      this.end = end;
      this.tiebreaker = System.nanoTime();
    }

    @Override
    public byte[] getStartKey() {
      return start;
    }

    @Override
    public byte[] getEndKey() {
      return end;
    }

    public String toString() {
      return "[" + Bytes.toString(start) + ", " + Bytes.toString(end) + "]";
    }
  }

  // Orders ranges by start key, then end key, then the creation tiebreaker.
  // NOTE(review): start keys use Bytes.BYTES_COMPARATOR while end keys use
  // RegionSplitCalculator.BYTES_COMPARATOR -- presumably the latter gives the
  // empty end key special (maximum) treatment; confirm against the calculator.
  Comparator<SimpleRange> cmp = new Comparator<SimpleRange>() {
    @Override
    public int compare(SimpleRange sr1, SimpleRange sr2) {
      ComparisonChain cc = ComparisonChain.start();
      cc = cc.compare(sr1.getStartKey(), sr2.getStartKey(),
          Bytes.BYTES_COMPARATOR);
      cc = cc.compare(sr1.getEndKey(), sr2.getEndKey(),
          RegionSplitCalculator.BYTES_COMPARATOR);
      cc = cc.compare(sr1.tiebreaker, sr2.tiebreaker);
      return cc.result();
    }
  };

  /**
   * Check the "depth" (number of regions included at a split) of a generated
   * split calculation
   */
  void checkDepths(SortedSet<byte[]> splits,
      Multimap<byte[], SimpleRange> regions, Integer... depths) {
    // One expected depth per split point, in iteration order of the set.
    assertEquals(splits.size(), depths.length);
    int i = 0;
    for (byte[] k : splits) {
      Collection<SimpleRange> rs = regions.get(k);
      int sz = rs == null ? 0 : rs.size();
      assertEquals((int) depths[i], sz);
      i++;
    }
  }

  /**
   * This dumps data in a visually reasonable way for visual debugging. It has
   * the basic iteration structure.
   */
  String dump(SortedSet<byte[]> splits, Multimap<byte[], SimpleRange> regions) {
    // we display this way because the last end key should be displayed as well.
    StringBuilder sb = new StringBuilder();
    for (byte[] k : splits) {
      sb.append(Bytes.toString(k) + ":\t");
      for (SimpleRange r : regions.get(k)) {
        sb.append(r.toString() + "\t");
      }
      sb.append("\n");
    }
    String s = sb.toString();
    LOG.info("\n" + s);
    return s;
  }

  // Three contiguous ranges -> every split has depth 1, last split is empty.
  @Test
  public void testSplitCalculator() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
    SimpleRange c = new SimpleRange(Bytes.toBytes("C"), Bytes.toBytes("D"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);
    sc.add(c);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Standard");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 1, 1, 0);
    assertEquals(res, "A:\t[A, B]\t\n" + "B:\t[B, C]\t\n" + "C:\t[C, D]\t\n"
        + "D:\t\n");
  }

  // No ranges at all -> no splits, empty dump.
  @Test
  public void testSplitCalculatorNoEdge() {
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Empty");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions);
    assertEquals(res, "");
  }

  @Test
  public void testSplitCalculatorSingleEdge() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Single edge");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 0);
    assertEquals(res, "A:\t[A, B]\t\n" + "B:\t\n");
  }

  // Degenerate range whose start and end keys are equal.
  @Test
  public void testSplitCalculatorDegenerateEdge() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("A"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Single empty edge");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1);
    assertEquals(res, "A:\t[A, A]\t\n");
  }

  @Test
  public void testSplitCalculatorCoverSplit() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
    SimpleRange c = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);
    sc.add(c);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AC covers AB, BC");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 2, 2, 0);
    assertEquals(res, "A:\t[A, B]\t[A, C]\t\n" + "B:\t[A, C]\t[B, C]\t\n"
        + "C:\t\n");
  }

  @Test
  public void testSplitCalculatorOverEndpoint() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
    SimpleRange c = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("D"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);
    sc.add(c);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AB, BD covers BC");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 2, 1, 0);
    assertEquals(res, "A:\t[A, B]\t\n" + "B:\t[B, C]\t[B, D]\t\n"
        + "C:\t[B, D]\t\n" + "D:\t\n");
  }

  // Gap between C and E -> the C split has depth 0 (a "hole").
  @Test
  public void testSplitCalculatorHoles() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
    SimpleRange c = new SimpleRange(Bytes.toBytes("E"), Bytes.toBytes("F"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);
    sc.add(c);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Hole between C and E");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 1, 0, 1, 0);
    assertEquals(res, "A:\t[A, B]\t\n" + "B:\t[B, C]\t\n" + "C:\t\n"
        + "E:\t[E, F]\t\n" + "F:\t\n");
  }

  @Test
  public void testSplitCalculatorOverreach() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("D"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AC and BD overlap but share no start/end keys");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 2, 1, 0);
    assertEquals(res, "A:\t[A, C]\t\n" + "B:\t[A, C]\t[B, D]\t\n"
        + "C:\t[B, D]\t\n" + "D:\t\n");
  }

  @Test
  public void testSplitCalculatorFloor() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AC and AB overlap in the beginning");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 2, 1, 0);
    assertEquals(res, "A:\t[A, B]\t[A, C]\t\n" + "B:\t[A, C]\t\n" + "C:\t\n");
  }

  @Test
  public void testSplitCalculatorCeil() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AC and BC overlap in the end");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 2, 0);
    assertEquals(res, "A:\t[A, C]\t\n" + "B:\t[A, C]\t[B, C]\t\n" + "C:\t\n");
  }

  // Two distinct instances with identical keys: the tiebreaker keeps them
  // as two separate regions, so depth at A is 2.
  @Test
  public void testSplitCalculatorEq() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
    SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));

    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);
    sc.add(b);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("AC and AC overlap completely");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 2, 0);
    assertEquals(res, "A:\t[A, C]\t[A, C]\t\n" + "C:\t\n");
  }

  // A range whose end key sorts before its start key is rejected entirely.
  @Test
  public void testSplitCalculatorBackwards() {
    SimpleRange a = new SimpleRange(Bytes.toBytes("C"), Bytes.toBytes("A"));
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(a);

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("CA is backwards");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions); // expect nothing
    assertEquals(res, "");
  }

  @Test
  public void testComplex() {
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("Am")));
    sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C")));
    sc.add(new SimpleRange(Bytes.toBytes("Am"), Bytes.toBytes("C")));
    sc.add(new SimpleRange(Bytes.toBytes("D"), Bytes.toBytes("E")));
    sc.add(new SimpleRange(Bytes.toBytes("F"), Bytes.toBytes("G")));
    sc.add(new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("E")));
    sc.add(new SimpleRange(Bytes.toBytes("H"), Bytes.toBytes("I")));
    sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B")));

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Something fairly complex");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 3, 3, 3, 1, 2, 0, 1, 0, 1, 0);
    assertEquals(res, "A:\t[A, Am]\t[A, B]\t[A, C]\t\n"
        + "Am:\t[A, B]\t[A, C]\t[Am, C]\t\n"
        + "B:\t[A, C]\t[Am, C]\t[B, E]\t\n" + "C:\t[B, E]\t\n"
        + "D:\t[B, E]\t[D, E]\t\n" + "E:\t\n" + "F:\t[F, G]\t\n" + "G:\t\n"
        + "H:\t[H, I]\t\n" + "I:\t\n");
  }

  // Empty byte[] markers at the table boundaries: "" as start key sorts
  // first, "" as end key shows up as the trailing "null" split in the dump.
  @Test
  public void testBeginEndMarker() {
    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
        cmp);
    sc.add(new SimpleRange(Bytes.toBytes(""), Bytes.toBytes("A")));
    sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B")));
    sc.add(new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("")));

    Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
    LOG.info("Special cases -- empty");
    String res = dump(sc.getSplits(), regions);
    checkDepths(sc.getSplits(), regions, 1, 1, 1, 0);
    assertEquals(res, ":\t[, A]\t\n" + "A:\t[A, B]\t\n" + "B:\t[B, ]\t\n"
        + "null:\t\n");
  }

  @Test
  public void testBigRanges() {
    SimpleRange ai = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("I"));
    SimpleRange ae = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("E"));
    SimpleRange ac = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));

    Collection<SimpleRange> bigOverlap = new ArrayList<SimpleRange>();
    bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("E")));
    bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C")));
    bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B")));
    bigOverlap.add(new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C")));
    bigOverlap.add(new SimpleRange(Bytes.toBytes("E"), Bytes.toBytes("H")));
    bigOverlap.add(ai);
    bigOverlap.add(ae);
    bigOverlap.add(ac);

    // Expect 1 range to be returned: ai
    List<SimpleRange> bigRanges = RegionSplitCalculator.findBigRanges(bigOverlap, 1);
    assertEquals(1, bigRanges.size());
    assertEquals(ai, bigRanges.get(0));

    // Expect 3 ranges to be returned: ai, ae and ac
    bigRanges = RegionSplitCalculator.findBigRanges(bigOverlap, 3);
    assertEquals(3, bigRanges.size());
    assertEquals(ai, bigRanges.get(0));

    // The relative order of ae and ac is unspecified; accept either.
    SimpleRange r1 = bigRanges.get(1);
    SimpleRange r2 = bigRanges.get(2);
    assertEquals(Bytes.toString(r1.start), "A");
    assertEquals(Bytes.toString(r2.start), "A");
    String r1e = Bytes.toString(r1.end);
    String r2e = Bytes.toString(r2.end);
    assertTrue((r1e.equals("C") && r2e.equals("E"))
        || (r1e.equals("E") && r2e.equals("C")));
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
/** * Copyright (c) 2013-2019 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.service.rest.field; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.Map; import org.apache.commons.lang3.reflect.FieldUtils; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.locationtech.geowave.service.rest.GeoWaveOperationServiceWrapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; import com.google.common.base.Function; import com.google.common.collect.Lists; public class RestFieldFactory { private static final Logger LOGGER = LoggerFactory.getLogger(GeoWaveOperationServiceWrapper.class); private static final BitSet UNESCAPED_CHARS = initUnescapedChars(); @FunctionalInterface private interface ParameterInitializer<T extends RestField<?>> { public T apply(Field field, Parameter parameter, Object instance); } @FunctionalInterface private interface MainParamInitializer<T extends RestField<?>> { public T apply( String name, boolean isList, Field mainParamField, int subfieldOrdinal, int totalSize, Object instance); } public static List<RestField<?>> createRestFields(final Class<?> instanceType) { return internalCreateRestFields( // for just getting the fields we don't need to waste time on // using reflection to get an instance, that is only necessary // for setting values null, instanceType, (ParameterInitializer<RestField<?>>) ( final Field field, 
final Parameter parameter, final Object instance) -> new ParameterRestField(field, parameter), ( final String name, final boolean isList, final Field mainParamField, final int subfieldOrdinal, final int totalSize, final Object instance) -> new BasicRestField( name, isList ? List.class : String.class, "main parameter", true)); } public static List<RestFieldValue<?>> createRestFieldValues(final Object instance) { return internalCreateRestFields( instance, instance.getClass(), (ParameterInitializer<RestFieldValue<?>>) ( final Field field, final Parameter parameter, final Object i) -> new ParameterRestFieldValue(field, parameter, i), ( final String name, final boolean isList, final Field mainParamField, final int subfieldOrdinal, final int totalSize, final Object i) -> isList ? new ListMainParam( subfieldOrdinal, totalSize, mainParamField, new BasicRestField<>(name, List.class, "main parameter", true), instance) : new StringMainParam( subfieldOrdinal, totalSize, mainParamField, new BasicRestField<>(name, String.class, "main parameter", true), instance)); } private static <T extends RestField<?>> List<T> internalCreateRestFields( final Object instance, final Class<?> instanceType, final ParameterInitializer<T> parameterInitializer, final MainParamInitializer<T> mainParamInitializer) { final List<T> retVal = new ArrayList<>(); for (final Field field : FieldUtils.getFieldsWithAnnotation(instanceType, Parameter.class)) { retVal.addAll( internalCreateRestFields( field, field.getAnnotation(Parameter.class), instance, parameterInitializer, mainParamInitializer)); } for (final Field field : FieldUtils.getFieldsWithAnnotation( instanceType, ParametersDelegate.class)) { try { final Class<?> delegateInstanceType; Object delegateInstance; if (instance != null) { // HP Fortify "Access Control" false positive // The need to change the accessibility here is // necessary, has been review and judged to be safe field.setAccessible(true); delegateInstance = field.get(instance); if 
(delegateInstance == null) { delegateInstanceType = field.getType(); delegateInstance = delegateInstanceType.newInstance(); } else { delegateInstanceType = delegateInstance.getClass(); if (delegateInstance instanceof Map) { for (final Object mapValueInstance : ((Map) delegateInstance).values()) { final Class<?> mapValueInstanceType = mapValueInstance.getClass(); retVal.addAll( internalCreateRestFields( mapValueInstance, mapValueInstanceType, parameterInitializer, mainParamInitializer)); } } } field.set(instance, delegateInstance); } else { delegateInstanceType = field.getType(); // here just assume if instance was null we don't need to // waste // time on reflection to make delegate instance delegateInstance = null; } retVal.addAll( internalCreateRestFields( delegateInstance, delegateInstanceType, parameterInitializer, mainParamInitializer)); } catch (InstantiationException | IllegalAccessException e) { LOGGER.error("Unable to instantiate field", e); } } return retVal; } private static <T extends RestField<?>> List<T> internalCreateRestFields( final Field field, final Parameter parameter, final Object instance, final ParameterInitializer<T> parameterInitializer, final MainParamInitializer<T> mainParamInitializer) { // handle case for core/main params for a command // for now we parse based on assumptions within description // TODO see Issue #1185 for details on a more explicit main // parameter suggestion final String desc = parameter.description(); // this is intended to match one or more "<" + at least one alphanumeric // or some select special character + ">" if (List.class.isAssignableFrom(field.getType()) && !desc.isEmpty() && desc.matches("(<[a-zA-Z0-9:/\\s]+>\\s*)+")) { int currentEndParamIndex = 0; // this simply is collecting names and a flag to indicate if its a // list final List<Pair<String, Boolean>> individualParams = new ArrayList<>(); do { final int currentStartParamIndex = desc.indexOf('<', currentEndParamIndex); if ((currentStartParamIndex < 0) || 
(currentStartParamIndex >= (desc.length() - 1))) { break; } currentEndParamIndex = desc.indexOf('>', currentStartParamIndex + 1); final String fullName = desc.substring(currentStartParamIndex + 1, currentEndParamIndex).trim(); if (!fullName.isEmpty()) { if (fullName.startsWith("comma separated list of ")) { individualParams.add(ImmutablePair.of(fullName.substring(24).trim(), true)); } else if (fullName.startsWith("comma delimited ")) { individualParams.add(ImmutablePair.of(fullName.substring(16).trim(), true)); } else { individualParams.add(ImmutablePair.of(fullName, false)); } } } while ((currentEndParamIndex > 0) && (currentEndParamIndex < desc.length())); final int totalSize = individualParams.size(); return Lists.transform(individualParams, new Function<Pair<String, Boolean>, T>() { int i = 0; @Override public T apply(final Pair<String, Boolean> input) { if (input != null) { return mainParamInitializer.apply( toURLFriendlyString(input.getLeft()), input.getRight(), field, i++, totalSize, instance); } else { return null; } } }); } else { return Collections.singletonList(parameterInitializer.apply(field, parameter, instance)); } } public static String toURLFriendlyString(final String str) { boolean needToChange = false; final StringBuffer out = new StringBuffer(str.length()); for (int i = 0; i < str.length(); i++) { final int c = str.charAt(i); if (UNESCAPED_CHARS.get(c)) { out.append((char) c); } else { needToChange = true; out.append('_'); } } return (needToChange ? out.toString() : str); } private static BitSet initUnescapedChars() { final BitSet unescapedChars = new BitSet(256); int i; for (i = 'a'; i <= 'z'; i++) { unescapedChars.set(i); } for (i = 'A'; i <= 'Z'; i++) { unescapedChars.set(i); } for (i = '0'; i <= '9'; i++) { unescapedChars.set(i); unescapedChars.set('-'); unescapedChars.set('_'); unescapedChars.set('.'); unescapedChars.set('*'); } return unescapedChars; } }
/*
 *
 * Copyright 2015 Robert Winkler
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *
 */
package io.github.swagger2markup.markup.builder.internal.markdown;

import io.github.swagger2markup.markup.builder.*;
import io.github.swagger2markup.markup.builder.internal.AbstractMarkupDocBuilder;
import io.github.swagger2markup.markup.builder.internal.Markup;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;

import java.io.Reader;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.join;

/**
 * Markdown implementation of {@code MarkupDocBuilder}: each method appends the
 * corresponding Markdown markup to the inherited {@code documentBuilder}.
 * The exact characters emitted (delimiters, spaces, newlines) are part of the
 * output contract — treat any change to an append chain as a behavior change.
 *
 * @author Robert Winkler
 */
public class MarkdownBuilder extends AbstractMarkupDocBuilder {

    // Matches a Markdown ATX title line: 1..MAX_TITLE_LEVEL+1 leading '#'
    // characters, whitespace, then the title text (used by importMarkup to
    // shift heading levels).
    private static final Pattern TITLE_PATTERN =
            Pattern.compile(String.format("^(%s{1,%d})\\s+(.*)$", Markdown.TITLE, MAX_TITLE_LEVEL + 1));

    // Block-style -> fence delimiter. Only LISTING/LITERAL get a code fence;
    // the other styles emit no delimiter in Markdown.
    private static final Map<MarkupBlockStyle, String> BLOCK_STYLE =
            new HashMap<MarkupBlockStyle, String>() {{
                put(MarkupBlockStyle.EXAMPLE, "");
                put(MarkupBlockStyle.LISTING, Markdown.LISTING.toString());
                put(MarkupBlockStyle.LITERAL, Markdown.LISTING.toString());
                put(MarkupBlockStyle.PASSTHROUGH, "");
                put(MarkupBlockStyle.SIDEBAR, "");
            }};

    /** Builds with the platform default line separator. */
    public MarkdownBuilder() {
        super();
    }

    /** Builds with an explicit line separator (used by {@link #copy(boolean)}). */
    public MarkdownBuilder(String newLine) {
        super(newLine);
    }

    protected MarkupLanguage getMarkupLanguage() {
        return MarkupLanguage.MARKDOWN;
    }

    /**
     * Returns a new builder with the same newline and anchor prefix,
     * optionally copying the accumulated document buffer.
     */
    @Override
    public MarkupDocBuilder copy(boolean copyBuffer) {
        MarkdownBuilder builder = new MarkdownBuilder(newLine);

        if (copyBuffer)
            builder.documentBuilder = new StringBuilder(this.documentBuilder);

        return builder.withAnchorPrefix(anchorPrefix);
    }

    @Override
    public MarkupDocBuilder documentTitle(String title) {
        documentTitle(Markdown.DOCUMENT_TITLE, title);
        return this;
    }

    @Override
    public MarkupDocBuilder sectionTitleLevel(int level, String title) {
        sectionTitleLevel(Markdown.TITLE, level, title);
        return this;
    }

    @Override
    public MarkupDocBuilder sectionTitleWithAnchorLevel(int level, String title, String anchor) {
        sectionTitleWithAnchorLevel(Markdown.TITLE, level, title, anchor);
        return this;
    }

    /**
     * Appends a trimmed paragraph followed by a blank line. With hardbreaks,
     * every inner newline is suffixed with Markdown's line-break marker so the
     * renderer keeps the breaks.
     */
    @Override
    public MarkupDocBuilder paragraph(String text, boolean hardbreaks) {
        Validate.notBlank(text, "text must not be blank");
        text = text.trim();
        if (hardbreaks)
            text = replaceNewLines(text, Markdown.LINE_BREAK + newLine);
        else
            text = replaceNewLines(text);
        documentBuilder.append(text).append(newLine).append(newLine);
        return this;
    }

    // Markdown has no real page break; a horizontal rule is the closest analogue.
    @Override
    public MarkupDocBuilder pageBreak() {
        documentBuilder.append(newLine).append("***").append(newLine);
        return this;
    }

    /**
     * Appends a delimited block, optionally prefixed by an "Admonition | Title :"
     * header line. Multiline LISTING content additionally forces a newline
     * after the opening fence (the boolean arg to delimitedBlockText).
     */
    @Override
    public MarkupDocBuilder block(String text, final MarkupBlockStyle style, String title, MarkupAdmonition admonition) {
        boolean multiline = text.contains(newLine);
        if (admonition != null)
            documentBuilder.append(StringUtils.capitalize(admonition.name().toLowerCase()));
        if (title != null) {
            if (admonition != null)
                documentBuilder.append(" | ");
            documentBuilder.append(title);
        }
        if (admonition != null || title != null)
            documentBuilder.append(" : ").append(newLine);
        // Anonymous Markup whose toString() is the style's fence delimiter
        // (possibly empty) — delimitedBlockText wants a Markup token.
        Markup m = new Markup() {
            public String toString() {
                return BLOCK_STYLE.get(style);
            }
        };
        if (style == MarkupBlockStyle.LISTING && multiline) {
            delimitedBlockText(m, text, m, true);
        } else {
            delimitedBlockText(m, text, m);
        }
        return this;
    }

    /**
     * Appends a fenced code block; when a language is given it is placed
     * directly after the opening fence (```lang).
     */
    @Override
    public MarkupDocBuilder listingBlock(String text, String language) {
        text = newLine + text;
        if (language != null) {
            text = language + text;
        }
        block(text, MarkupBlockStyle.LISTING);
        return this;
    }

    // NOTE(review): renders literal text with the LITERAL marker through the
    // bold-text helper — presumably because both wrap text in a symmetric
    // delimiter; confirm against AbstractMarkupDocBuilder.
    @Override
    public MarkupDocBuilder literalText(String text) {
        boldText(Markdown.LITERAL, text);
        return this;
    }

    @Override
    public MarkupDocBuilder boldText(String text) {
        boldText(Markdown.BOLD, text);
        return this;
    }

    @Override
    public MarkupDocBuilder italicText(String text) {
        italicText(Markdown.ITALIC, text);
        return this;
    }

    @Override
    public MarkupDocBuilder unorderedList(List<String> list) {
        unorderedList(Markdown.LIST_ENTRY, list);
        return this;
    }

    @Override
    public MarkupDocBuilder unorderedListItem(String item) {
        unorderedListItem(Markdown.LIST_ENTRY, item);
        return this;
    }

    /** Normalizes an anchor name using Markdown's space-escape character. */
    private String normalizeAnchor(String anchor) {
        return normalizeAnchor(Markdown.SPACE_ESCAPE, anchor);
    }

    // Emits a raw HTML anchor; the 'text' argument is intentionally unused in
    // Markdown (the anchor tag is empty).
    @Override
    public MarkupDocBuilder anchor(String anchor, String text) {
        documentBuilder.append("<a name=\"").append(normalizeAnchor(anchor)).append("\"></a>");
        return this;
    }

    /**
     * Appends a [text](document#anchor) link without normalizing the anchor.
     * Falls back to the trimmed anchor as link text when text is null.
     */
    @Override
    public MarkupDocBuilder crossReferenceRaw(String document, String anchor, String text) {
        if (text == null)
            text = anchor.trim();
        documentBuilder.append("[").append(text).append("]").append("(");
        if (document != null)
            documentBuilder.append(document);
        documentBuilder.append("#").append(anchor).append(")");
        return this;
    }

    @Override
    public MarkupDocBuilder crossReference(String document, String anchor, String text) {
        return crossReferenceRaw(document, normalizeAnchor(anchor), text);
    }

    /**
     * Makes a cell safe for a Markdown table row: newlines become {@code <br>}
     * and the column delimiter is backslash-escaped.
     */
    private String formatTableCell(String cell) {
        cell = replaceNewLines(cell.trim(), "<br>");
        return cell.replace(Markdown.TABLE_COLUMN_DELIMITER.toString(), "\\" + Markdown.TABLE_COLUMN_DELIMITER.toString());
    }

    /**
     * Appends a pipe table: an optional header row plus a '---' separator row
     * per column, then one row per cell list.
     */
    @Override
    public MarkupDocBuilder tableWithColumnSpecs(List<MarkupTableColumn> columnSpecs, List<List<String>> cells) {
        Validate.notEmpty(cells, "cells must not be null");
        newLine();
        if (columnSpecs != null && !columnSpecs.isEmpty()) {
            Collection<String> headerList = columnSpecs.stream().map(header -> formatTableCell(defaultString(header.header))).collect(Collectors.toList());
            documentBuilder.append(Markdown.TABLE_COLUMN_DELIMITER).append(join(headerList, Markdown.TABLE_COLUMN_DELIMITER.toString())).append(Markdown.TABLE_COLUMN_DELIMITER).append(newLine);

            documentBuilder.append(Markdown.TABLE_COLUMN_DELIMITER);
            columnSpecs.forEach(col -> {
                documentBuilder.append(StringUtils.repeat(Markdown.TABLE_ROW.toString(), 3));
                documentBuilder.append(Markdown.TABLE_COLUMN_DELIMITER);
            });
            documentBuilder.append(newLine);
        }
        for (List<String> row : cells) {
            Collection<String> cellList = row.stream().map(cell -> formatTableCell(defaultString(cell))).collect(Collectors.toList());
            documentBuilder.append(Markdown.TABLE_COLUMN_DELIMITER).append(join(cellList, Markdown.TABLE_COLUMN_DELIMITER.toString())).append(Markdown.TABLE_COLUMN_DELIMITER).append(newLine);
        }
        newLine();
        return this;
    }

    @Override
    public MarkupDocBuilder newLine(boolean forceLineBreak) {
        newLine(Markdown.LINE_BREAK, forceLineBreak);
        return this;
    }

    /** Imports external markup, shifting its title levels by levelOffset. */
    @Override
    public MarkupDocBuilder importMarkup(Reader markupText, MarkupLanguage markupLanguage, int levelOffset) {
        importMarkupStyle1(TITLE_PATTERN, Markdown.TITLE, markupText, markupLanguage, levelOffset);
        return this;
    }

    @Override
    public String addFileExtension(String fileName) {
        return fileName + MarkupLanguage.MARKDOWN.getFileNameExtensions().get(0);
    }
}
package io.yawp.repository.pipes;

import io.yawp.commons.utils.ReflectionUtils;
import io.yawp.repository.Feature;
import io.yawp.repository.IdRef;
import io.yawp.repository.Repository;
import io.yawp.repository.pipes.pump.IdPump;
import io.yawp.repository.pipes.pump.PumpGenerator;
import io.yawp.repository.query.QueryBuilder;

import java.util.List;
import java.util.Set;

/**
 * Pipe API
 * <p/>
 * The Pipe API is used to create an asynchronous information flow from
 * one endpoint model (source) to another (sink). It can be used to
 * create a variety of aggregation models without creating scalability
 * bottle-necks.
 *
 * @param <T> The source endpoint model type.
 * @param <S> The sink endpoint model type.
 */
public abstract class Pipe<T, S> extends Feature {

    // Batch size used by both id pumps when draining ids.
    private static final int BATCH_SIZE = 30;

    // Source/sink model classes, captured at init() time from the concrete
    // pipe's generic type arguments.
    private Class<T> sourceClazz;

    private Class<S> sinkClazz;

    // Accumulators for the ids configured by configureSources()/configureSinks().
    private IdPump<T> sourcePump;

    private IdPump<S> sinkPump;

    /**
     * Reflectively instantiates a concrete pipe, wires in the repository and
     * resolves its source/sink classes from the pipe's generic signature.
     * Instantiation failures are rethrown as RuntimeException.
     */
    public static Pipe newInstance(Repository r, Class<? extends Pipe> pipeClazz) {
        try {
            Class<?> sourceClazz = ReflectionUtils.getFeatureEndpointClazz(pipeClazz);
            Class<?> sinkClazz = ReflectionUtils.getFeatureTypeArgumentAt(pipeClazz, 1);
            Pipe pipe = pipeClazz.newInstance();
            pipe.setRepository(r);
            pipe.init(sourceClazz, sinkClazz);
            return pipe;
        } catch (InstantiationException | IllegalAccessException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Initializes the pipe with its endpoint classes and fresh id pumps.
     * Called by {@link #newInstance(Repository, Class)}.
     */
    public final void init(Class<T> sourceClazz, Class<S> sinkClazz) {
        this.sourceClazz = sourceClazz;
        this.sinkClazz = sinkClazz;
        this.sourcePump = new IdPump<>(sourceClazz, BATCH_SIZE);
        this.sinkPump = new IdPump<>(sinkClazz, BATCH_SIZE);
    }

    /**
     * Override this method to specify a custom queue for this pipe.
     *
     * @return Queue name, or null to use the default queue.
     */
    public String getQueueName() {
        return null;
    }

    /**
     * Override this method to configure one or multiple sinks for a given source.
     * <p/>
     * Call {@link #addSinkId(IdRef<S>)} for each sink you want to pipe the source.
     * <p/>
     * <b>Note:</b> the sinkIds should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param source The source that needs to be piped to a sink.
     */
    public abstract void configureSinks(T source);

    /**
     * Call this method from {@link #configureSinks(T)} to add a sink id for
     * a given source.
     * <p/>
     * <b>Note:</b> the sink id should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param id The sink id.
     */
    public final void addSinkId(IdRef<S> id) {
        sinkPump.add(id);
    }

    /**
     * Call this method from {@link #configureSinks(T)} to add list of sink ids
     * for a given source.
     * <p/>
     * <b>Note:</b> the sink ids should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param ids The sink ids.
     */
    public final void addSinkIds(List<IdRef<S>> ids) {
        sinkPump.addAll(ids);
    }

    /**
     * Call this method from {@link #configureSinks(T)} to add a query of sink ids
     * for a given source.
     * <p/>
     * <b>Note:</b> the sink ids should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param query The {@link QueryBuilder<S>} to query for sink ids.
     */
    public final void addSinkIdsQuery(QueryBuilder<S> query) {
        sinkPump.addQuery(query);
    }

    /**
     * Call this method from {@link #configureSinks(T)} to add a {@link PumpGenerator<IdRef<S>>}
     * of sink ids for a given source.
     * <p/>
     * <b>Note:</b> the sink ids should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param generator The generator.
     */
    public final void addSinkIdsGenerator(PumpGenerator<IdRef<S>> generator) {
        sinkPump.addGenerator(generator);
    }

    /**
     * Override this method to flux information from the source to sink.
     * This method will be invoked asynchronously when the source is created
     * or updated and the source has been associated with the sink.
     *
     * @param source The source object.
     * @param sink   The sink object.
     */
    public abstract void flux(T source, S sink);

    /**
     * Override this method to reflux source information from the sink.
     * This method will be invoked asynchronously when the source is updated
     * or destroyed and the source is no longer associated with the sink.
     *
     * @param source The source object.
     * @param sink   The sink object.
     */
    public abstract void reflux(T source, S sink);

    /**
     * Override this method to decide if a sink needs to be reflowed after
     * it is created or updated.
     * <p/>
     * The sink will be reflowed asynchronously by fluxing all sources
     * configured in {@link #configureSources(S)}.
     *
     * @param newSink The sink object containing its new data.
     * @param oldSink The sink object containing its previous data.
     *                It will be null if the sink is being created.
     * @return Whether the sink needs to be reloaded.
     */
    public boolean reflowCondition(S newSink, S oldSink) {
        // default: sinks are never reflowed unless a subclass opts in
        return false;
    }

    /**
     * Override this method to define configure one or multiple source objects
     * to be fluxed when the specified sink is reflowed.
     * <p/>
     * Call {@link #addSourceId(IdRef<T>)}, {@link #addSourceIds(List<T>)} or
     * {@link #addSourceIdsQuery(QueryBuilder<T>)} to specify which sources should be
     * reflowed to the sink.
     * <p/>
     * <b>Note:</b> the sources should be retrieved in a strong consistent way
     * (ancestor query in GAE), otherwise the pipe may become inconsistent.
     *
     * @param sink The sink object.
     */
    public void configureSources(S sink) {
        // default: no sources configured — reflow fluxes nothing
    }

    /**
     * Call this method from {@link #configureSources(S)} to add
     * source ids to be fluxed when the specified sink is reflowed.
     * <p/>
     * <b>Note:</b> the source should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param source The source id.
     */
    public void addSourceId(IdRef<T> source) {
        sourcePump.add(source);
    }

    /**
     * Call this method from {@link #configureSources(S)} to add a list of
     * source ids to be fluxed when the specified sink is reflowed.
     * <p/>
     * <b>Note:</b> the sources should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param sources The list of source objects.
     */
    public void addSourceIds(List<IdRef<T>> sources) {
        sourcePump.addAll(sources);
    }

    /**
     * Call this method from {@link #configureSources(S)} to add a query for
     * source ids to be fluxed when the specified sink is reflowed.
     * <p/>
     * <b>Note:</b> this query should be strong consistent (ancestor query
     * in GAE), otherwise the pipe may become inconsistent.
     *
     * @param query The {@link QueryBuilder<T>} to query for sources.
     */
    public void addSourceIdsQuery(QueryBuilder<T> query) {
        sourcePump.addQuery(query);
    }

    /**
     * Call this method from {@link #configureSources(S)} to add a {@link PumpGenerator<IdRef<T>>}
     * of source ids for a given sink.
     * <p/>
     * <b>Note:</b> the sources should be retrieved in a strong consistent way
     * (ancestor query or key fetch in GAE), otherwise the pipe may become
     * inconsistent.
     *
     * @param generator The generator.
     */
    public void addSourceIdsGenerator(PumpGenerator<IdRef<T>> generator) {
        sourcePump.addGenerator(generator);
    }

    /**
     * Override this method to empty the sink before it is reloaded.
     *
     * @param sink The sink object.
     */
    public void drain(S sink) {
        // default: nothing to drain
    }

    /** Returns all sink ids accumulated so far by configureSinks(). */
    public final Set<IdRef<S>> allSinks() {
        return sinkPump.all();
    }

    /** Whether the sink pump still has ids pending. */
    public final boolean hasSinks() {
        return sinkPump.hasMore();
    }

    /**
     * Discards any accumulated sink ids and targets exactly the given sink.
     */
    public final void forceSink(IdRef<S> sinkId) {
        sinkPump = new IdPump<>(sinkClazz, BATCH_SIZE);
        sinkPump.add(sinkId);
    }

    // Exposes the source pump for the framework's reflow machinery.
    public IdPump<T> getSourcePump() {
        return sourcePump;
    }
}
/*************************************************************************
 *  Compilation:  javac Point2D.java
 *  Execution:    java Point2D x0 y0 N
 *  Dependencies: StdDraw.java StdRandom.java
 *
 *  Immutable point data type for points in the plane.
 *
 *************************************************************************/

import java.util.Arrays;
import java.util.Comparator;

/**
 *  The <tt>Point</tt> class is an immutable data type to encapsulate a
 *  two-dimensional point with real-value coordinates.
 *  <p>
 *  Note: in order to deal with the difference behavior of double and
 *  Double with respect to -0.0 and +0.0, the Point2D constructor converts
 *  any coordinates that are -0.0 to +0.0.
 *
 *  For additional documentation, see <a href="/algs4/12oop">Section 1.2</a> of
 *  <i>Algorithms, 4th Edition</i> by Robert Sedgewick and Kevin Wayne.
 *
 *  @author Robert Sedgewick
 *  @author Kevin Wayne
 */
public class Point2D implements Comparable<Point2D> {

    /**
     * Compares two points by x-coordinate.
     */
    public static final Comparator<Point2D> X_ORDER = new XOrder();

    /**
     * Compares two points by y-coordinate.
     */
    public static final Comparator<Point2D> Y_ORDER = new YOrder();

    /**
     * Compares two points by polar radius.
     */
    public static final Comparator<Point2D> R_ORDER = new ROrder();

    // The three comparators below are instance members because they compare
    // OTHER points relative to this point.

    /**
     * Compares two points by polar angle (between 0 and 2pi) with respect to this point.
     */
    public final Comparator<Point2D> POLAR_ORDER = new PolarOrder();

    /**
     * Compares two points by atan2() angle (between -pi and pi) with respect to this point.
     */
    public final Comparator<Point2D> ATAN2_ORDER = new Atan2Order();

    /**
     * Compares two points by distance to this point.
     */
    public final Comparator<Point2D> DISTANCE_TO_ORDER = new DistanceToOrder();

    private final double x;    // x coordinate
    private final double y;    // y coordinate

    /**
     * Initializes a new point (x, y).
     * @param x the x-coordinate
     * @param y the y-coordinate
     * @throws IllegalArgumentException if either <tt>x</tt> or <tt>y</tt>
     *    is <tt>Double.NaN</tt>, <tt>Double.POSITIVE_INFINITY</tt> or
     *    <tt>Double.NEGATIVE_INFINITY</tt>
     */
    public Point2D(double x, double y) {
        if (Double.isInfinite(x) || Double.isInfinite(y))
            throw new IllegalArgumentException("Coordinates must be finite");
        if (Double.isNaN(x) || Double.isNaN(y))
            throw new IllegalArgumentException("Coordinates cannot be NaN");
        if (x == 0.0) x = 0.0;  // convert -0.0 to +0.0
        if (y == 0.0) y = 0.0;  // convert -0.0 to +0.0
        this.x = x;
        this.y = y;
    }

    /**
     * Returns the x-coordinate.
     * @return the x-coordinate
     */
    public double x() {
        return x;
    }

    /**
     * Returns the y-coordinate.
     * @return the y-coordinate
     */
    public double y() {
        return y;
    }

    /**
     * Returns the polar radius of this point.
     * @return the polar radius of this point in polar coordinates: sqrt(x*x + y*y)
     */
    public double r() {
        return Math.sqrt(x*x + y*y);
    }

    /**
     * Returns the angle of this point in polar coordinates.
     * @return the angle (in radians) of this point in polar coordinates
     *   (between -pi and pi, per Math.atan2)
     */
    public double theta() {
        return Math.atan2(y, x);
    }

    /**
     * Returns the angle between this point and that point.
     * @return the angle in radians (between -pi and pi) between this point
     *   and that point (0 if equal)
     */
    private double angleTo(Point2D that) {
        double dx = that.x - this.x;
        double dy = that.y - this.y;
        return Math.atan2(dy, dx);
    }

    /**
     * Is a->b->c a counterclockwise turn?
     * @param a first point
     * @param b second point
     * @param c third point
     * @return { -1, 0, +1 } if a->b->c is a { clockwise, collinear, counterclockwise } turn.
     */
    public static int ccw(Point2D a, Point2D b, Point2D c) {
        // sign of twice the signed triangle area (cross product of ab and ac)
        double area2 = (b.x-a.x)*(c.y-a.y) - (b.y-a.y)*(c.x-a.x);
        if      (area2 < 0) return -1;
        else if (area2 > 0) return +1;
        else                return  0;
    }

    /**
     * Returns twice the signed area of the triangle a-b-c.
     * @param a first point
     * @param b second point
     * @param c third point
     * @return twice the signed area of the triangle a-b-c
     */
    public static double area2(Point2D a, Point2D b, Point2D c) {
        return (b.x-a.x)*(c.y-a.y) - (b.y-a.y)*(c.x-a.x);
    }

    /**
     * Returns the Euclidean distance between this point and that point.
     * @param that the other point
     * @return the Euclidean distance between this point and that point
     */
    public double distanceTo(Point2D that) {
        double dx = this.x - that.x;
        double dy = this.y - that.y;
        return Math.sqrt(dx*dx + dy*dy);
    }

    /**
     * Returns the square of the Euclidean distance between this point and that point.
     * @param that the other point
     * @return the square of the Euclidean distance between this point and that point
     */
    public double distanceSquaredTo(Point2D that) {
        double dx = this.x - that.x;
        double dy = this.y - that.y;
        return dx*dx + dy*dy;
    }

    /**
     * Compares this point to that point by y-coordinate, breaking ties by x-coordinate.
     * @param that the other point
     * @return { a negative integer, zero, a positive integer } if this point is
     *    { less than, equal to, greater than } that point
     */
    public int compareTo(Point2D that) {
        // NaN is impossible here (rejected by the constructor), so raw
        // comparisons are safe
        if (this.y < that.y) return -1;
        if (this.y > that.y) return +1;
        if (this.x < that.x) return -1;
        if (this.x > that.x) return +1;
        return 0;
    }

    // compare points according to their x-coordinate
    private static class XOrder implements Comparator<Point2D> {
        public int compare(Point2D p, Point2D q) {
            if (p.x < q.x) return -1;
            if (p.x > q.x) return +1;
            return 0;
        }
    }

    // compare points according to their y-coordinate
    private static class YOrder implements Comparator<Point2D> {
        public int compare(Point2D p, Point2D q) {
            if (p.y < q.y) return -1;
            if (p.y > q.y) return +1;
            return 0;
        }
    }

    // compare points according to their polar radius
    private static class ROrder implements Comparator<Point2D> {
        public int compare(Point2D p, Point2D q) {
            // compare squared radii; taking sqrt is unnecessary for ordering
            double delta = (p.x*p.x + p.y*p.y) - (q.x*q.x + q.y*q.y);
            if (delta < 0) return -1;
            if (delta > 0) return +1;
            return 0;
        }
    }

    // compare other points relative to atan2 angle (between -pi and pi)
    // they make with this Point
    private class Atan2Order implements Comparator<Point2D> {
        public int compare(Point2D q1, Point2D q2) {
            double angle1 = angleTo(q1);
            double angle2 = angleTo(q2);
            if      (angle1 < angle2) return -1;
            else if (angle1 > angle2) return +1;
            else                      return  0;
        }
    }

    // compare other points relative to polar angle (between 0 and 2pi)
    // they make with this Point; avoids computing angles explicitly by
    // classifying points as above/below and falling back to a ccw test
    private class PolarOrder implements Comparator<Point2D> {
        public int compare(Point2D q1, Point2D q2) {
            double dx1 = q1.x - x;
            double dy1 = q1.y - y;
            double dx2 = q2.x - x;
            double dy2 = q2.y - y;

            if      (dy1 >= 0 && dy2 < 0) return -1;    // q1 above; q2 below
            else if (dy2 >= 0 && dy1 < 0) return +1;    // q1 below; q2 above
            else if (dy1 == 0 && dy2 == 0) {            // 3-collinear and horizontal
                if      (dx1 >= 0 && dx2 < 0) return -1;
                else if (dx2 >= 0 && dx1 < 0) return +1;
                else                          return  0;
            }
            else return -ccw(Point2D.this, q1, q2);     // both above or below

            // Note: ccw() recomputes dx1, dy1, dx2, and dy2
        }
    }

    // compare points according to their distance to this point
    private class DistanceToOrder implements Comparator<Point2D> {
        public int compare(Point2D p, Point2D q) {
            // squared distances order the same as true distances
            double dist1 = distanceSquaredTo(p);
            double dist2 = distanceSquaredTo(q);
            if      (dist1 < dist2) return -1;
            else if (dist1 > dist2) return +1;
            else                    return  0;
        }
    }

    /**
     * Does this point equal the other point?
     * @param other the other point
     * @return true if this point equals the other point; false otherwise
     */
    public boolean equals(Object other) {
        if (other == this) return true;
        if (other == null) return false;
        if (other.getClass() != this.getClass()) return false;
        Point2D that = (Point2D) other;
        // exact comparison is safe: -0.0 was normalized to +0.0 in the constructor
        return this.x == that.x && this.y == that.y;
    }

    /**
     * Return a string representation of this point.
     * @return a string representation of this point in the format (x, y)
     */
    public String toString() {
        return "(" + x + ", " + y + ")";
    }

    /**
     * Returns an integer hash code for this point.
     * @return an integer hash code for this point
     */
    public int hashCode() {
        int hashX = ((Double) x).hashCode();
        int hashY = ((Double) y).hashCode();
        return 31*hashX + hashY;
    }

    /**
     * Plot this point using standard draw.
     */
    public void draw() {
        StdDraw.point(x, y);
    }

    /**
     * Plot a line from this point to that point using standard draw.
     * @param that the other point
     */
    public void drawTo(Point2D that) {
        StdDraw.line(this.x, this.y, that.x, that.y);
    }

    /**
     * Unit tests the point data type.
     */
    public static void main(String[] args) {
        int x0 = Integer.parseInt(args[0]);
        int y0 = Integer.parseInt(args[1]);
        int N = Integer.parseInt(args[2]);

        StdDraw.setCanvasSize(800, 800);
        StdDraw.setXscale(0, 100);
        StdDraw.setYscale(0, 100);
        StdDraw.setPenRadius(.005);
        Point2D[] points = new Point2D[N];
        for (int i = 0; i < N; i++) {
            int x = StdRandom.uniform(100);
            int y = StdRandom.uniform(100);
            points[i] = new Point2D(x, y);
            points[i].draw();
        }

        // draw p = (x0, y0) in red
        Point2D p = new Point2D(x0, y0);
        StdDraw.setPenColor(StdDraw.RED);
        StdDraw.setPenRadius(.02);
        p.draw();

        // draw line segments from p to each point, one at a time, in polar order
        StdDraw.setPenRadius();
        StdDraw.setPenColor(StdDraw.BLUE);
        Arrays.sort(points, p.POLAR_ORDER);
        for (int i = 0; i < N; i++) {
            p.drawTo(points[i]);
            StdDraw.show(100);
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode.snapshot; import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.loadINodeDirectory; import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.loadPermission; import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Loader.updateBlocksMap; import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Saver.buildINodeDirectory; import static org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode.Saver.buildINodeFile; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import com.google.common.collect.ImmutableList; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import 
org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto; import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTypeProto; import org.apache.hadoop.hdfs.protocolPB.PBHelperClient; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.server.namenode.AclEntryStatusFormat; import org.apache.hadoop.hdfs.server.namenode.AclFeature; import org.apache.hadoop.hdfs.server.namenode.FSDirectory; import org.apache.hadoop.hdfs.server.namenode.FSImage; import org.apache.hadoop.hdfs.server.namenode.FSImageFormatPBINode; import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf; import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.LoaderContext; import org.apache.hadoop.hdfs.server.namenode.FSImageFormatProtobuf.SectionName; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.FileSummary; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeReferenceSection; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.CreatedListEntry; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotDiffSection.DiffEntry.Type; import org.apache.hadoop.hdfs.server.namenode.FsImageProto.SnapshotSection; import org.apache.hadoop.hdfs.server.namenode.INode; import org.apache.hadoop.hdfs.server.namenode.INodeDirectory; import org.apache.hadoop.hdfs.server.namenode.INodeDirectoryAttributes; import org.apache.hadoop.hdfs.server.namenode.INodeFile; import org.apache.hadoop.hdfs.server.namenode.INodeFileAttributes; import org.apache.hadoop.hdfs.server.namenode.INodeMap; import org.apache.hadoop.hdfs.server.namenode.INodeReference; 
import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
import org.apache.hadoop.hdfs.server.namenode.INodeWithAdditionalFields;
import org.apache.hadoop.hdfs.server.namenode.QuotaByStorageTypeEntry;
import org.apache.hadoop.hdfs.server.namenode.SaveNamespaceContext;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiff;
import org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature.DirectoryDiffList;
import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.Root;
import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
import org.apache.hadoop.hdfs.util.EnumCounters;

import com.google.common.base.Preconditions;
import com.google.protobuf.ByteString;

/**
 * Loads and saves the snapshot-related sections (SNAPSHOT, SNAPSHOT_DIFF,
 * INODE_REFERENCE) of the protobuf-based FSImage. Records within each section
 * are length-delimited protobuf messages, so read order must exactly mirror
 * write order; in particular, INodeReference entries are addressed elsewhere
 * by their position in the shared refList.
 */
@InterfaceAudience.Private
public class FSImageFormatPBSnapshot {
  /**
   * Loading snapshot related information from protobuf based FSImage
   */
  public final static class Loader {
    private final FSNamesystem fsn;
    private final FSDirectory fsDir;
    // Outer fsimage loader; supplies the shared LoaderContext (string table,
    // positional INodeReference list).
    private final FSImageFormatProtobuf.Loader parent;
    // snapshot id -> Snapshot; filled by loadSnapshots(), consumed by
    // loadDirectoryDiffList() when resolving a diff's snapshot root.
    private final Map<Integer, Snapshot> snapshotMap;

    public Loader(FSNamesystem fsn, FSImageFormatProtobuf.Loader parent) {
      this.fsn = fsn;
      this.fsDir = fsn.getFSDirectory();
      this.snapshotMap = new HashMap<Integer, Snapshot>();
      this.parent = parent;
    }

    /**
     * The sequence of the ref node in refList must be strictly the same with
     * the sequence in fsimage
     */
    public void loadINodeReferenceSection(InputStream in) throws IOException {
      final List<INodeReference> refList = parent.getLoaderContext()
          .getRefList();
      while (true) {
        INodeReferenceSection.INodeReference e = INodeReferenceSection
            .INodeReference.parseDelimitedFrom(in);
        // parseDelimitedFrom returns null at end of the section stream.
        if (e == null) {
          break;
        }
        INodeReference ref = loadINodeReference(e);
        refList.add(ref);
      }
    }

    /**
     * Rebuilds a single INodeReference from its protobuf form, reusing the
     * referred inode's existing WithCount wrapper if one is already attached.
     * The presence of dstSnapshotId distinguishes DstReference from WithName.
     */
    private INodeReference loadINodeReference(
        INodeReferenceSection.INodeReference r) {
      long referredId = r.getReferredId();
      INode referred = fsDir.getInode(referredId);
      WithCount withCount = (WithCount) referred.getParentReference();
      if (withCount == null) {
        withCount = new INodeReference.WithCount(null, referred);
      }
      final INodeReference ref;
      if (r.hasDstSnapshotId()) { // DstReference
        ref = new INodeReference.DstReference(null, withCount,
            r.getDstSnapshotId());
      } else {
        ref = new INodeReference.WithName(null, withCount, r.getName()
            .toByteArray(), r.getLastSnapshotId());
      }
      return ref;
    }

    /**
     * Load the snapshots section from fsimage. Also add snapshottable feature
     * to snapshottable directories.
     */
    public void loadSnapshotSection(InputStream in) throws IOException {
      SnapshotManager sm = fsn.getSnapshotManager();
      SnapshotSection section = SnapshotSection.parseDelimitedFrom(in);
      int snum = section.getNumSnapshots();
      sm.setNumSnapshots(snum);
      sm.setSnapshotCounter(section.getSnapshotCounter());
      for (long sdirId : section.getSnapshottableDirList()) {
        INodeDirectory dir = fsDir.getInode(sdirId).asDirectory();
        if (!dir.isSnapshottable()) {
          dir.addSnapshottableFeature();
        } else {
          // dir is root, and admin set root to snapshottable before
          dir.setSnapshotQuota(
              DirectorySnapshottableFeature.SNAPSHOT_QUOTA_DEFAULT);
        }
        sm.addSnapshottable(dir);
      }
      loadSnapshots(in, snum);
    }

    /**
     * Reads {@code size} Snapshot records, attaches each to its parent
     * snapshottable directory, and registers it in {@link #snapshotMap}.
     */
    private void loadSnapshots(InputStream in, int size) throws IOException {
      for (int i = 0; i < size; i++) {
        SnapshotSection.Snapshot pbs = SnapshotSection.Snapshot
            .parseDelimitedFrom(in);
        INodeDirectory root = loadINodeDirectory(pbs.getRoot(),
            parent.getLoaderContext());
        int sid = pbs.getSnapshotId();
        // NOTE: this local shadows the 'parent' loader field; it is the
        // snapshottable directory that owns the snapshot root.
        INodeDirectory parent = fsDir.getInode(root.getId()).asDirectory();
        Snapshot snapshot = new Snapshot(sid, root, parent);
        // add the snapshot to parent, since we follow the sequence of
        // snapshotsByNames when saving, we do not need to sort when loading
        parent.getDirectorySnapshottableFeature().addSnapshot(snapshot);
        snapshotMap.put(sid, snapshot);
      }
    }

    /**
     * Load the snapshot diff section from fsimage.
     */
    public void loadSnapshotDiffSection(InputStream in) throws IOException {
      final List<INodeReference> refList = parent.getLoaderContext()
          .getRefList();
      while (true) {
        SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
            .parseDelimitedFrom(in);
        if (entry == null) {
          break;
        }
        long inodeId = entry.getInodeId();
        INode inode = fsDir.getInode(inodeId);
        SnapshotDiffSection.DiffEntry.Type type = entry.getType();
        switch (type) {
        case FILEDIFF:
          loadFileDiffList(in, inode.asFile(), entry.getNumOfDiff());
          break;
        case DIRECTORYDIFF:
          loadDirectoryDiffList(in, inode.asDirectory(), entry.getNumOfDiff(),
              refList);
          break;
        }
      }
    }

    /** Load FileDiff list for a file with snapshot feature */
    private void loadFileDiffList(InputStream in, INodeFile file, int size)
        throws IOException {
      final FileDiffList diffs = new FileDiffList();
      final LoaderContext state = parent.getLoaderContext();
      final BlockManager bm = fsn.getBlockManager();
      for (int i = 0; i < size; i++) {
        SnapshotDiffSection.FileDiff pbf = SnapshotDiffSection.FileDiff
            .parseDelimitedFrom(in);
        INodeFileAttributes copy = null;
        if (pbf.hasSnapshotCopy()) {
          INodeSection.INodeFile fileInPb = pbf.getSnapshotCopy();
          PermissionStatus permission = loadPermission(
              fileInPb.getPermission(), state.getStringTable());
          AclFeature acl = null;
          if (fileInPb.hasAcl()) {
            int[] entries = AclEntryStatusFormat
                .toInt(FSImageFormatPBINode.Loader.loadAclEntries(
                    fileInPb.getAcl(), state.getStringTable()));
            acl = new AclFeature(entries);
          }
          XAttrFeature xAttrs = null;
          if (fileInPb.hasXAttrs()) {
            xAttrs = new XAttrFeature(FSImageFormatPBINode.Loader.loadXAttrs(
                fileInPb.getXAttrs(), state.getStringTable()));
          }
          // Striped files carry an EC policy id instead of a replication
          // factor; the unused one is stored as null.
          boolean isStriped = (fileInPb.getBlockType() == BlockTypeProto
              .STRIPED);
          Short replication = (!isStriped ?
              (short)fileInPb.getReplication() : null);
          Byte ecPolicyID = (isStriped ?
              (byte)fileInPb.getErasureCodingPolicyID() : null);
          copy = new INodeFileAttributes.SnapshotCopy(pbf.getName()
              .toByteArray(), permission, acl, fileInPb.getModificationTime(),
              fileInPb.getAccessTime(), replication, ecPolicyID,
              fileInPb.getPreferredBlockSize(),
              (byte)fileInPb.getStoragePolicyID(), xAttrs,
              PBHelperClient.convert(fileInPb.getBlockType()));
        }
        FileDiff diff = new FileDiff(pbf.getSnapshotId(), copy, null,
            pbf.getFileSize());
        List<BlockProto> bpl = pbf.getBlocksList();
        // in file diff there can only be contiguous blocks
        BlockInfo[] blocks = new BlockInfo[bpl.size()];
        for(int j = 0, e = bpl.size(); j < e; ++j) {
          Block blk = PBHelperClient.convert(bpl.get(j));
          BlockInfo storedBlock = bm.getStoredBlock(blk);
          if(storedBlock == null) {
            storedBlock = (BlockInfoContiguous) fsn.getBlockManager()
                .addBlockCollectionWithCheck(new BlockInfoContiguous(blk,
                    copy.getFileReplication()), file);
          }
          blocks[j] = storedBlock;
        }
        if(blocks.length > 0) {
          diff.setBlocks(blocks);
        }
        // Diffs were written newest-first by the saver; addFirst restores
        // the original list order.
        diffs.addFirst(diff);
      }
      file.addSnapshotFeature(diffs);
      // A snapshot copy may pin a higher replication than the current file;
      // raise block replication to the preferred value where needed.
      short repl = file.getPreferredBlockReplication();
      for (BlockInfo b : file.getBlocks()) {
        if (b.getReplication() < repl) {
          bm.setReplication(b.getReplication(), repl, b);
        }
      }
    }

    /** Load the created list in a DirectoryDiff */
    private List<INode> loadCreatedList(InputStream in, INodeDirectory dir,
        int size) throws IOException {
      List<INode> clist = new ArrayList<INode>(size);
      for (long c = 0; c < size; c++) {
        CreatedListEntry entry = CreatedListEntry.parseDelimitedFrom(in);
        // The created entry stores only a local name; resolve it against dir.
        INode created = SnapshotFSImageFormat.loadCreated(entry.getName()
            .toByteArray(), dir);
        clist.add(created);
      }
      return clist;
    }

    /**
     * Re-parents a deleted inode under {@code parent} and, for files,
     * re-registers its blocks with the BlockManager.
     */
    private void addToDeletedList(INode dnode, INodeDirectory parent) {
      dnode.setParent(parent);
      if (dnode.isFile()) {
        updateBlocksMap(dnode.asFile(), fsn.getBlockManager());
      }
    }

    /**
     * Load the deleted list in a DirectoryDiff
     */
    private List<INode> loadDeletedList(final List<INodeReference> refList,
        InputStream in, INodeDirectory dir, List<Long> deletedNodes,
        List<Integer> deletedRefNodes) throws IOException {
      List<INode> dlist = new ArrayList<INode>(deletedRefNodes.size()
          + deletedNodes.size());
      // load non-reference inodes
      for (long deletedId : deletedNodes) {
        INode deleted = fsDir.getInode(deletedId);
        dlist.add(deleted);
        addToDeletedList(deleted, dir);
      }
      // load reference nodes in the deleted list; refId indexes into the
      // positional refList built by loadINodeReferenceSection
      for (int refId : deletedRefNodes) {
        INodeReference deletedRef = refList.get(refId);
        dlist.add(deletedRef);
        addToDeletedList(deletedRef, dir);
      }
      // Keep the deleted list sorted by local name byte order.
      Collections.sort(dlist, new Comparator<INode>() {
        @Override
        public int compare(INode n1, INode n2) {
          return n1.compareTo(n2.getLocalNameBytes());
        }
      });
      return dlist;
    }

    /** Load DirectoryDiff list for a directory with snapshot feature */
    private void loadDirectoryDiffList(InputStream in, INodeDirectory dir,
        int size, final List<INodeReference> refList) throws IOException {
      if (!dir.isWithSnapshot()) {
        dir.addSnapshotFeature(null);
      }
      DirectoryDiffList diffs = dir.getDiffs();
      final LoaderContext state = parent.getLoaderContext();
      for (int i = 0; i < size; i++) {
        // load a directory diff
        SnapshotDiffSection.DirectoryDiff diffInPb = SnapshotDiffSection.
            DirectoryDiff.parseDelimitedFrom(in);
        final int snapshotId = diffInPb.getSnapshotId();
        final Snapshot snapshot = snapshotMap.get(snapshotId);
        int childrenSize = diffInPb.getChildrenSize();
        boolean useRoot = diffInPb.getIsSnapshotRoot();
        INodeDirectoryAttributes copy = null;
        if (useRoot) {
          // Snapshot-root diffs reuse the snapshot root's attributes directly.
          copy = snapshot.getRoot();
        } else if (diffInPb.hasSnapshotCopy()) {
          INodeSection.INodeDirectory dirCopyInPb = diffInPb.getSnapshotCopy();
          final byte[] name = diffInPb.getName().toByteArray();
          PermissionStatus permission = loadPermission(
              dirCopyInPb.getPermission(), state.getStringTable());
          AclFeature acl = null;
          if (dirCopyInPb.hasAcl()) {
            int[] entries = AclEntryStatusFormat
                .toInt(FSImageFormatPBINode.Loader.loadAclEntries(
                    dirCopyInPb.getAcl(), state.getStringTable()));
            acl = new AclFeature(entries);
          }
          XAttrFeature xAttrs = null;
          if (dirCopyInPb.hasXAttrs()) {
            xAttrs = new XAttrFeature(FSImageFormatPBINode.Loader.loadXAttrs(
                dirCopyInPb.getXAttrs(), state.getStringTable()));
          }
          long modTime = dirCopyInPb.getModificationTime();
          // -1 quotas plus no per-type quotas means "no quota set"; pick the
          // cheaper SnapshotCopy in that case.
          boolean noQuota = dirCopyInPb.getNsQuota() == -1
              && dirCopyInPb.getDsQuota() == -1
              && (!dirCopyInPb.hasTypeQuotas());
          if (noQuota) {
            copy = new INodeDirectoryAttributes.SnapshotCopy(name, permission,
                acl, modTime, xAttrs);
          } else {
            EnumCounters<StorageType> typeQuotas = null;
            if (dirCopyInPb.hasTypeQuotas()) {
              ImmutableList<QuotaByStorageTypeEntry> qes =
                  FSImageFormatPBINode.Loader.loadQuotaByStorageTypeEntries(
                      dirCopyInPb.getTypeQuotas());
              typeQuotas = new EnumCounters<StorageType>(StorageType.class,
                  HdfsConstants.QUOTA_RESET);
              for (QuotaByStorageTypeEntry qe : qes) {
                if (qe.getQuota() >= 0 && qe.getStorageType() != null
                    && qe.getStorageType().supportTypeQuota()) {
                  typeQuotas.set(qe.getStorageType(), qe.getQuota());
                }
              }
            }
            copy = new INodeDirectoryAttributes.CopyWithQuota(name, permission,
                acl, modTime, dirCopyInPb.getNsQuota(),
                dirCopyInPb.getDsQuota(), typeQuotas, xAttrs);
          }
        }
        // load created list
        List<INode> clist = loadCreatedList(in, dir,
            diffInPb.getCreatedListSize());
        // load deleted list
        List<INode> dlist = loadDeletedList(refList, in, dir,
            diffInPb.getDeletedINodeList(),
            diffInPb.getDeletedINodeRefList());
        // create the directory diff
        DirectoryDiff diff = new DirectoryDiff(snapshotId, copy, null,
            childrenSize, clist, dlist, useRoot);
        // Saver wrote the diffs in reverse order (see serializeDirDiffList),
        // so addFirst restores original order.
        diffs.addFirst(diff);
      }
    }
  }

  /**
   * Saving snapshot related information to protobuf based FSImage
   */
  public final static class Saver {
    private final FSNamesystem fsn;
    // fsimage file summary being assembled; sections are committed into it.
    private final FileSummary.Builder headers;
    // Outer fsimage saver; supplies SaverContext (string table, refList)
    // and section-commit support.
    private final FSImageFormatProtobuf.Saver parent;
    // Used to honor cancellation requests during long save operations.
    private final SaveNamespaceContext context;
    // Count of non-fatal inconsistencies observed while writing; exposed
    // through getNumImageErrors().
    private long numImageErrors;

    public Saver(FSImageFormatProtobuf.Saver parent,
        FileSummary.Builder headers, SaveNamespaceContext context,
        FSNamesystem fsn) {
      this.parent = parent;
      this.headers = headers;
      this.context = context;
      this.fsn = fsn;
      this.numImageErrors = 0;
    }

    /**
     * save all the snapshottable directories and snapshots to fsimage
     */
    public void serializeSnapshotSection(OutputStream out) throws IOException {
      SnapshotManager sm = fsn.getSnapshotManager();
      SnapshotSection.Builder b = SnapshotSection.newBuilder()
          .setSnapshotCounter(sm.getSnapshotCounter())
          .setNumSnapshots(sm.getNumSnapshots());
      INodeDirectory[] snapshottables = sm.getSnapshottableDirs();
      for (INodeDirectory sdir : snapshottables) {
        b.addSnapshottableDir(sdir.getId());
      }
      b.build().writeDelimitedTo(out);
      int i = 0;
      for(INodeDirectory sdir : snapshottables) {
        for (Snapshot s : sdir.getDirectorySnapshottableFeature()
            .getSnapshotList()) {
          Root sroot = s.getRoot();
          SnapshotSection.Snapshot.Builder sb = SnapshotSection.Snapshot
              .newBuilder().setSnapshotId(s.getId());
          INodeSection.INodeDirectory.Builder db = buildINodeDirectory(sroot,
              parent.getSaverContext());
          INodeSection.INode r = INodeSection.INode.newBuilder()
              .setId(sroot.getId())
              .setType(INodeSection.INode.Type.DIRECTORY)
              .setName(ByteString.copyFrom(sroot.getLocalNameBytes()))
              .setDirectory(db).build();
          sb.setRoot(r).build().writeDelimitedTo(out);
          i++;
          // Periodically allow a pending saveNamespace cancel to abort us.
          if (i % FSImageFormatProtobuf.Saver.CHECK_CANCEL_INTERVAL == 0) {
            context.checkCancelled();
          }
        }
      }
      Preconditions.checkState(i == sm.getNumSnapshots());
      parent.commitSection(headers,
          FSImageFormatProtobuf.SectionName.SNAPSHOT);
    }

    /**
     * This can only be called after serializing both INode_Dir and SnapshotDiff
     */
    public void serializeINodeReferenceSection(OutputStream out)
        throws IOException {
      final List<INodeReference> refList = parent.getSaverContext()
          .getRefList();
      long i = 0;
      for (INodeReference ref : refList) {
        INodeReferenceSection.INodeReference.Builder rb =
            buildINodeReference(ref, i++);
        rb.build().writeDelimitedTo(out);
      }
      parent.commitSection(headers, SectionName.INODE_REFERENCE);
    }

    /**
     * Builds the protobuf form of one INodeReference; logs (and counts) a
     * non-fatal error if the referred inode no longer exists in the directory.
     */
    private INodeReferenceSection.INodeReference.Builder buildINodeReference(
        final INodeReference ref, final long refIndex) throws IOException {
      INodeReferenceSection.INodeReference.Builder rb =
          INodeReferenceSection.INodeReference.newBuilder().
              setReferredId(ref.getId());
      if (ref instanceof WithName) {
        rb.setLastSnapshotId(((WithName) ref).getLastSnapshotId()).setName(
            ByteString.copyFrom(ref.getLocalNameBytes()));
      } else if (ref instanceof DstReference) {
        rb.setDstSnapshotId(ref.getDstSnapshotId());
      }
      if (fsn.getFSDirectory().getInode(ref.getId()) == null) {
        FSImage.LOG.error(
            "FSImageFormatPBSnapshot: Missing referred INodeId " +
            ref.getId() + " for INodeReference index " + refIndex +
            "; path=" + ref.getFullPathName() +
            "; parent=" + (ref.getParent() == null ?
                "null" : ref.getParent().getFullPathName()));
        ++numImageErrors;
      }
      return rb;
    }

    /**
     * save all the snapshot diff to fsimage
     */
    public void serializeSnapshotDiffSection(OutputStream out)
        throws IOException {
      INodeMap inodesMap = fsn.getFSDirectory().getINodeMap();
      final List<INodeReference> refList = parent.getSaverContext()
          .getRefList();
      int i = 0;
      Iterator<INodeWithAdditionalFields> iter = inodesMap.getMapIterator();
      while (iter.hasNext()) {
        INodeWithAdditionalFields inode = iter.next();
        if (inode.isFile()) {
          serializeFileDiffList(inode.asFile(), out);
        } else if (inode.isDirectory()) {
          serializeDirDiffList(inode.asDirectory(), refList, out);
        }
        ++i;
        if (i % FSImageFormatProtobuf.Saver.CHECK_CANCEL_INTERVAL == 0) {
          context.checkCancelled();
        }
      }
      parent.commitSection(headers,
          FSImageFormatProtobuf.SectionName.SNAPSHOT_DIFF);
    }

    /**
     * Writes a file's FileDiff list (newest diff first) preceded by a
     * DiffEntry header; no-op for files without the snapshot feature.
     */
    private void serializeFileDiffList(INodeFile file, OutputStream out)
        throws IOException {
      FileWithSnapshotFeature sf = file.getFileWithSnapshotFeature();
      if (sf != null) {
        DiffList<FileDiff> diffList = sf.getDiffs().asList();
        SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
            .newBuilder().setInodeId(file.getId()).setType(Type.FILEDIFF)
            .setNumOfDiff(diffList.size()).build();
        entry.writeDelimitedTo(out);
        for (int i = diffList.size() - 1; i >= 0; i--) {
          FileDiff diff = diffList.get(i);
          SnapshotDiffSection.FileDiff.Builder fb = SnapshotDiffSection.FileDiff
              .newBuilder().setSnapshotId(diff.getSnapshotId())
              .setFileSize(diff.getFileSize());
          if(diff.getBlocks() != null) {
            for(Block block : diff.getBlocks()) {
              fb.addBlocks(PBHelperClient.convert(block));
            }
          }
          INodeFileAttributes copy = diff.snapshotINode;
          if (copy != null) {
            fb.setName(ByteString.copyFrom(copy.getLocalNameBytes()))
                .setSnapshotCopy(buildINodeFile(copy,
                    parent.getSaverContext()));
          }
          fb.build().writeDelimitedTo(out);
        }
      }
    }

    /**
     * Writes one CreatedListEntry (local name only) per created inode.
     */
    private void saveCreatedList(List<INode> created, OutputStream out)
        throws IOException {
      // local names of the created list member
      for (INode c : created) {
        SnapshotDiffSection.CreatedListEntry.newBuilder()
            .setName(ByteString.copyFrom(c.getLocalNameBytes())).build()
            .writeDelimitedTo(out);
      }
    }

    /**
     * Writes a directory's DirectoryDiff list (newest diff first) preceded by
     * a DiffEntry header. Deleted references are appended to the shared
     * refList and stored by index; duplicate or misordered deleted-list names
     * are logged and counted as non-fatal image errors.
     */
    private void serializeDirDiffList(INodeDirectory dir,
        final List<INodeReference> refList, OutputStream out)
        throws IOException {
      DirectoryWithSnapshotFeature sf = dir.getDirectoryWithSnapshotFeature();
      if (sf != null) {
        DiffList<DirectoryDiff> diffList = sf.getDiffs().asList();
        SnapshotDiffSection.DiffEntry entry = SnapshotDiffSection.DiffEntry
            .newBuilder().setInodeId(dir.getId()).setType(Type.DIRECTORYDIFF)
            .setNumOfDiff(diffList.size()).build();
        entry.writeDelimitedTo(out);
        for (int i = diffList.size() - 1; i >= 0; i--) { // reverse order!
          DirectoryDiff diff = diffList.get(i);
          SnapshotDiffSection.DirectoryDiff.Builder db = SnapshotDiffSection.
              DirectoryDiff.newBuilder().setSnapshotId(diff.getSnapshotId())
              .setChildrenSize(diff.getChildrenSize())
              .setIsSnapshotRoot(diff.isSnapshotRoot());
          INodeDirectoryAttributes copy = diff.snapshotINode;
          if (!diff.isSnapshotRoot() && copy != null) {
            db.setName(ByteString.copyFrom(copy.getLocalNameBytes()))
                .setSnapshotCopy(
                    buildINodeDirectory(copy, parent.getSaverContext()));
          }
          // process created list and deleted list
          List<INode> created = diff.getChildrenDiff().getCreatedUnmodifiable();
          db.setCreatedListSize(created.size());
          List<INode> deleted = diff.getChildrenDiff().getDeletedUnmodifiable();
          INode previousNode = null;
          boolean misordered = false;
          for (INode d : deleted) {
            // getBytes() may return null below, and that is okay.
            final int result = previousNode == null ? -1 :
                previousNode.compareTo(d.getLocalNameBytes());
            if (result == 0) {
              FSImage.LOG.error(
                  "Name '" + d.getLocalName() + "' is repeated in the " +
                      "'deleted' difflist of directory " +
                      dir.getFullPathName() + ", INodeId=" + dir.getId());
              ++numImageErrors;
            } else if (result > 0 && !misordered) {
              misordered = true;
              ++numImageErrors;
            }
            previousNode = d;
            if (d.isReference()) {
              refList.add(d.asReference());
              db.addDeletedINodeRef(refList.size() - 1);
            } else {
              db.addDeletedINode(d.getId());
            }
          }
          if (misordered) {
            FSImage.LOG.error(
                "Misordered entries in the 'deleted' difflist of directory " +
                    dir.getFullPathName() + ", INodeId=" + dir.getId() +
                    ". The full list is " +
                    Arrays.toString(deleted.toArray()));
          }
          db.build().writeDelimitedTo(out);
          saveCreatedList(created, out);
        }
      }
    }

    /**
     * Number of non-fatal errors detected while writing the
     * SnapshotDiff and INodeReference sections.
     * @return the number of non-fatal errors detected.
     */
    public long getNumImageErrors() {
      return numImageErrors;
    }
  }

  // Utility holder: not instantiable.
  private FSImageFormatPBSnapshot(){}
}
/* * Copyright 2016-present Open Networking Laboratory * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.yangutils.plugin.manager; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.nio.file.Files; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarFile; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.model.Dependency; import org.apache.maven.model.Resource; import org.apache.maven.project.MavenProject; import org.onosproject.yangutils.datamodel.YangNode; import org.slf4j.Logger; import org.sonatype.plexus.build.incremental.BuildContext; import static org.onosproject.yangutils.utils.UtilConstants.HYPHEN; import static org.onosproject.yangutils.utils.UtilConstants.JAR; import static org.onosproject.yangutils.utils.UtilConstants.PERIOD; import static org.onosproject.yangutils.utils.UtilConstants.SLASH; import static org.onosproject.yangutils.utils.UtilConstants.TEMP; import static org.onosproject.yangutils.utils.UtilConstants.YANG_RESOURCES; import static org.onosproject.yangutils.utils.io.impl.YangIoUtils.getCamelCase; import static 
org.onosproject.yangutils.utils.io.impl.YangIoUtils.getPackageDirPathFromJavaJPackage; import static org.slf4j.LoggerFactory.getLogger; /** * Represents YANG plugin utilities. */ public final class YangPluginUtils { private static final Logger log = getLogger(YangPluginUtils.class); private static final String TARGET_RESOURCE_PATH = SLASH + TEMP + SLASH + YANG_RESOURCES + SLASH; private static final String SERIALIZED_FILE_EXTENSION = ".ser"; private YangPluginUtils() { } /** * Adds generated source directory to the compilation root. * * @param source directory * @param project current maven project * @param context current build context */ public static void addToCompilationRoot(String source, MavenProject project, BuildContext context) { project.addCompileSourceRoot(source); context.refresh(project.getBasedir()); log.info("Source directory added to compilation root: " + source); } /** * Copies YANG files to the current project's output directory. * * @param yangFileInfo list of YANG files * @param outputDir project's output directory * @param project maven project * @throws IOException when fails to copy files to destination resource directory */ public static void copyYangFilesToTarget(Set<YangFileInfo> yangFileInfo, String outputDir, MavenProject project) throws IOException { List<File> files = getListOfFile(yangFileInfo); String path = outputDir + TARGET_RESOURCE_PATH; File targetDir = new File(path); targetDir.mkdirs(); for (File file : files) { Files.copy(file.toPath(), new File(path + file.getName()).toPath(), StandardCopyOption.REPLACE_EXISTING); } addToProjectResource(outputDir + SLASH + TEMP + SLASH, project); } /** * Provides a list of files from list of strings. 
* * @param yangFileInfo set of yang file information * @return list of files */ private static List<File> getListOfFile(Set<YangFileInfo> yangFileInfo) { List<File> files = new ArrayList<>(); Iterator<YangFileInfo> yangFileIterator = yangFileInfo.iterator(); while (yangFileIterator.hasNext()) { YangFileInfo yangFile = yangFileIterator.next(); if (yangFile.isForTranslator()) { files.add(new File(yangFile.getYangFileName())); } } return files; } /** * Serializes data-model. * * @param directory base directory for serialized files * @param fileInfoSet YANG file info set * @param project maven project * @param operation true if need to add to resource * @throws IOException when fails to do IO operations */ public static void serializeDataModel(String directory, Set<YangFileInfo> fileInfoSet, MavenProject project, boolean operation) throws IOException { String serFileDirPath = directory + TARGET_RESOURCE_PATH; File dir = new File(serFileDirPath); dir.mkdirs(); if (operation) { addToProjectResource(directory + SLASH + TEMP + SLASH, project); } for (YangFileInfo fileInfo : fileInfoSet) { String serFileName = serFileDirPath + getCamelCase(fileInfo.getRootNode().getName(), null) + SERIALIZED_FILE_EXTENSION; fileInfo.setSerializedFile(serFileName); FileOutputStream fileOutputStream = new FileOutputStream(serFileName); ObjectOutputStream objectOutputStream = new ObjectOutputStream(fileOutputStream); objectOutputStream.writeObject(fileInfo.getRootNode()); objectOutputStream.close(); fileOutputStream.close(); } } /** * Returns de-serializes YANG data-model nodes. 
* * @param serailizedfileInfoSet YANG file info set * @return de-serializes YANG data-model nodes * @throws IOException when fails do IO operations */ public static List<YangNode> deSerializeDataModel(List<String> serailizedfileInfoSet) throws IOException { List<YangNode> nodes = new ArrayList<>(); for (String fileInfo : serailizedfileInfoSet) { YangNode node = null; try { FileInputStream fileInputStream = new FileInputStream(fileInfo); ObjectInputStream objectInputStream = new ObjectInputStream(fileInputStream); node = (YangNode) objectInputStream.readObject(); nodes.add(node); objectInputStream.close(); fileInputStream.close(); } catch (IOException | ClassNotFoundException e) { throw new IOException(fileInfo + " not found."); } } return nodes; } /** * Returns list of jar path. * * @param project maven project * @param localRepository local repository * @param remoteRepos remote repository * @return list of jar paths */ private static List<String> resolveDependecyJarPath(MavenProject project, ArtifactRepository localRepository, List<ArtifactRepository> remoteRepos) { StringBuilder path = new StringBuilder(); List<String> jarPaths = new ArrayList<>(); for (Object obj : project.getDependencies()) { Dependency dependency = (Dependency) obj; path.append(localRepository.getBasedir()); path.append(SLASH); path.append(getPackageDirPathFromJavaJPackage(dependency.getGroupId())); path.append(SLASH); path.append(dependency.getArtifactId()); path.append(SLASH); path.append(dependency.getVersion()); path.append(SLASH); path.append(dependency.getArtifactId() + HYPHEN + dependency.getVersion() + PERIOD + JAR); File jarFile = new File(path.toString()); if (jarFile.exists()) { jarPaths.add(path.toString()); } path.delete(0, path.length()); } for (ArtifactRepository repo : remoteRepos) { // TODO: add resolver for remote repo. } return jarPaths; } /** * Resolves inter jar dependencies. 
* * @param project current maven project * @param localRepository local maven repository * @param remoteRepos list of remote repository * @param directory directory for serialized files * @return list of resolved datamodel nodes * @throws IOException when fails to do IO operations */ public static List<YangNode> resolveInterJarDependencies(MavenProject project, ArtifactRepository localRepository, List<ArtifactRepository> remoteRepos, String directory) throws IOException { List<String> dependeciesJarPaths = resolveDependecyJarPath(project, localRepository, remoteRepos); List<YangNode> resolvedDataModelNodes = new ArrayList<>(); for (String dependecy : dependeciesJarPaths) { resolvedDataModelNodes.addAll(deSerializeDataModel(parseJarFile(dependecy, directory))); } return resolvedDataModelNodes; } /** * Parses jar file and returns list of serialized file names. * * @param jarFile jar file to be parsed * @param directory directory for keeping the searized files * @return list of serialized files * @throws IOException when fails to do IO operations */ public static List<String> parseJarFile(String jarFile, String directory) throws IOException { List<String> serailizedFiles = new ArrayList<>(); JarFile jar = new JarFile(jarFile); Enumeration<?> enumEntries = jar.entries(); File serializedFileDir = new File(directory); serializedFileDir.mkdirs(); while (enumEntries.hasMoreElements()) { JarEntry file = (JarEntry) enumEntries.nextElement(); if (file.getName().endsWith(SERIALIZED_FILE_EXTENSION)) { if (file.getName().contains(SLASH)) { String[] strArray = file.getName().split(SLASH); String tempPath = ""; for (int i = 0; i < strArray.length - 1; i++) { tempPath = SLASH + tempPath + SLASH + strArray[i]; } File dir = new File(directory + tempPath); dir.mkdirs(); } File serailizedFile = new File(directory + SLASH + file.getName()); if (file.isDirectory()) { serailizedFile.mkdirs(); continue; } InputStream inputStream = jar.getInputStream(file); FileOutputStream fileOutputStream 
= new FileOutputStream(serailizedFile); while (inputStream.available() > 0) { fileOutputStream.write(inputStream.read()); } fileOutputStream.close(); inputStream.close(); serailizedFiles.add(serailizedFile.toString()); } } jar.close(); return serailizedFiles; } /* Adds directory to resources of project */ private static void addToProjectResource(String dir, MavenProject project) { Resource rsc = new Resource(); rsc.setDirectory(dir); project.addResource(rsc); } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.braket.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Request object for the Amazon Braket CreateQuantumTask operation.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/braket-2019-09-01/CreateQuantumTask" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateQuantumTaskRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The action associated with the task.
     * </p>
     */
    private String action;
    /**
     * <p>
     * The client token associated with the request.
     * </p>
     */
    private String clientToken;
    /**
     * <p>
     * The ARN of the device to run the task on.
     * </p>
     */
    private String deviceArn;
    /**
     * <p>
     * The parameters for the device to run the task on.
     * </p>
     */
    private String deviceParameters;
    /**
     * <p>
     * The token for an Amazon Braket job that associates it with the quantum task.
     * </p>
     */
    private String jobToken;
    /**
     * <p>
     * The S3 bucket to store task result files in.
     * </p>
     */
    private String outputS3Bucket;
    /**
     * <p>
     * The key prefix for the location in the S3 bucket to store task results in.
     * </p>
     */
    private String outputS3KeyPrefix;
    /**
     * <p>
     * The number of shots to use for the task.
     * </p>
     */
    private Long shots;
    /**
     * <p>
     * Tags to be added to the quantum task you're creating.
     * </p>
     */
    private java.util.Map<String, String> tags;

    /**
     * <p>
     * The action associated with the task.
     * </p>
     * <p>
     * This field's value must be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     *
     * @param action
     *        The action associated with the task.
     */
    public void setAction(String action) {
        this.action = action;
    }

    /**
     * <p>
     * The action associated with the task.
     * </p>
     * <p>
     * This field's value will be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     *
     * @return The action associated with the task.
     */
    public String getAction() {
        return this.action;
    }

    /**
     * <p>
     * The action associated with the task.
     * </p>
     * <p>
     * This field's value must be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     *
     * @param action
     *        The action associated with the task.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withAction(String action) {
        setAction(action);
        return this;
    }

    /**
     * <p>
     * The client token associated with the request.
     * </p>
     *
     * @param clientToken
     *        The client token associated with the request.
     */
    public void setClientToken(String clientToken) {
        this.clientToken = clientToken;
    }

    /**
     * <p>
     * The client token associated with the request.
     * </p>
     *
     * @return The client token associated with the request.
     */
    public String getClientToken() {
        return this.clientToken;
    }

    /**
     * <p>
     * The client token associated with the request.
     * </p>
     *
     * @param clientToken
     *        The client token associated with the request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withClientToken(String clientToken) {
        setClientToken(clientToken);
        return this;
    }

    /**
     * <p>
     * The ARN of the device to run the task on.
     * </p>
     *
     * @param deviceArn
     *        The ARN of the device to run the task on.
     */
    public void setDeviceArn(String deviceArn) {
        this.deviceArn = deviceArn;
    }

    /**
     * <p>
     * The ARN of the device to run the task on.
     * </p>
     *
     * @return The ARN of the device to run the task on.
     */
    public String getDeviceArn() {
        return this.deviceArn;
    }

    /**
     * <p>
     * The ARN of the device to run the task on.
     * </p>
     *
     * @param deviceArn
     *        The ARN of the device to run the task on.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withDeviceArn(String deviceArn) {
        setDeviceArn(deviceArn);
        return this;
    }

    /**
     * <p>
     * The parameters for the device to run the task on.
     * </p>
     * <p>
     * This field's value must be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     *
     * @param deviceParameters
     *        The parameters for the device to run the task on.
     */
    public void setDeviceParameters(String deviceParameters) {
        this.deviceParameters = deviceParameters;
    }

    /**
     * <p>
     * The parameters for the device to run the task on.
     * </p>
     * <p>
     * This field's value will be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     *
     * @return The parameters for the device to run the task on.
     */
    public String getDeviceParameters() {
        return this.deviceParameters;
    }

    /**
     * <p>
     * The parameters for the device to run the task on.
     * </p>
     * <p>
     * This field's value must be valid JSON according to RFC 7159, including the opening and closing braces. For
     * example: '{"key": "value"}'.
     * </p>
     * <p>
     * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service.
     * Users of the SDK should not perform Base64 encoding on this field.
     * </p>
     *
     * @param deviceParameters
     *        The parameters for the device to run the task on.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withDeviceParameters(String deviceParameters) {
        setDeviceParameters(deviceParameters);
        return this;
    }

    /**
     * <p>
     * The token for an Amazon Braket job that associates it with the quantum task.
     * </p>
     *
     * @param jobToken
     *        The token for an Amazon Braket job that associates it with the quantum task.
     */
    public void setJobToken(String jobToken) {
        this.jobToken = jobToken;
    }

    /**
     * <p>
     * The token for an Amazon Braket job that associates it with the quantum task.
     * </p>
     *
     * @return The token for an Amazon Braket job that associates it with the quantum task.
     */
    public String getJobToken() {
        return this.jobToken;
    }

    /**
     * <p>
     * The token for an Amazon Braket job that associates it with the quantum task.
     * </p>
     *
     * @param jobToken
     *        The token for an Amazon Braket job that associates it with the quantum task.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withJobToken(String jobToken) {
        setJobToken(jobToken);
        return this;
    }

    /**
     * <p>
     * The S3 bucket to store task result files in.
     * </p>
     *
     * @param outputS3Bucket
     *        The S3 bucket to store task result files in.
     */
    public void setOutputS3Bucket(String outputS3Bucket) {
        this.outputS3Bucket = outputS3Bucket;
    }

    /**
     * <p>
     * The S3 bucket to store task result files in.
     * </p>
     *
     * @return The S3 bucket to store task result files in.
     */
    public String getOutputS3Bucket() {
        return this.outputS3Bucket;
    }

    /**
     * <p>
     * The S3 bucket to store task result files in.
     * </p>
     *
     * @param outputS3Bucket
     *        The S3 bucket to store task result files in.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withOutputS3Bucket(String outputS3Bucket) {
        setOutputS3Bucket(outputS3Bucket);
        return this;
    }

    /**
     * <p>
     * The key prefix for the location in the S3 bucket to store task results in.
     * </p>
     *
     * @param outputS3KeyPrefix
     *        The key prefix for the location in the S3 bucket to store task results in.
     */
    public void setOutputS3KeyPrefix(String outputS3KeyPrefix) {
        this.outputS3KeyPrefix = outputS3KeyPrefix;
    }

    /**
     * <p>
     * The key prefix for the location in the S3 bucket to store task results in.
     * </p>
     *
     * @return The key prefix for the location in the S3 bucket to store task results in.
     */
    public String getOutputS3KeyPrefix() {
        return this.outputS3KeyPrefix;
    }

    /**
     * <p>
     * The key prefix for the location in the S3 bucket to store task results in.
     * </p>
     *
     * @param outputS3KeyPrefix
     *        The key prefix for the location in the S3 bucket to store task results in.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withOutputS3KeyPrefix(String outputS3KeyPrefix) {
        setOutputS3KeyPrefix(outputS3KeyPrefix);
        return this;
    }

    /**
     * <p>
     * The number of shots to use for the task.
     * </p>
     *
     * @param shots
     *        The number of shots to use for the task.
     */
    public void setShots(Long shots) {
        this.shots = shots;
    }

    /**
     * <p>
     * The number of shots to use for the task.
     * </p>
     *
     * @return The number of shots to use for the task.
     */
    public Long getShots() {
        return this.shots;
    }

    /**
     * <p>
     * The number of shots to use for the task.
     * </p>
     *
     * @param shots
     *        The number of shots to use for the task.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withShots(Long shots) {
        setShots(shots);
        return this;
    }

    /**
     * <p>
     * Tags to be added to the quantum task you're creating.
     * </p>
     *
     * @return Tags to be added to the quantum task you're creating.
     */
    public java.util.Map<String, String> getTags() {
        return tags;
    }

    /**
     * <p>
     * Tags to be added to the quantum task you're creating.
     * </p>
     *
     * @param tags
     *        Tags to be added to the quantum task you're creating.
     */
    public void setTags(java.util.Map<String, String> tags) {
        this.tags = tags;
    }

    /**
     * <p>
     * Tags to be added to the quantum task you're creating.
     * </p>
     *
     * @param tags
     *        Tags to be added to the quantum task you're creating.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Add a single Tags entry
     *
     * @see CreateQuantumTaskRequest#withTags
     * @return Returns a reference to this object so that method calls can be chained together.
     * @throws IllegalArgumentException
     *         if the key is already present in the Tags map.
     */
    public CreateQuantumTaskRequest addTagsEntry(String key, String value) {
        if (null == this.tags) {
            this.tags = new java.util.HashMap<String, String>();
        }
        if (this.tags.containsKey(key))
            // NOTE: "key" is already a String; the former "key.toString()" was redundant
            // and would have thrown NullPointerException for a null key.
            throw new IllegalArgumentException("Duplicated keys (" + key + ") are provided.");
        this.tags.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tags.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CreateQuantumTaskRequest clearTagsEntries() {
        this.tags = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAction() != null)
            sb.append("Action: ").append(getAction()).append(",");
        if (getClientToken() != null)
            sb.append("ClientToken: ").append(getClientToken()).append(",");
        if (getDeviceArn() != null)
            sb.append("DeviceArn: ").append(getDeviceArn()).append(",");
        if (getDeviceParameters() != null)
            sb.append("DeviceParameters: ").append(getDeviceParameters()).append(",");
        if (getJobToken() != null)
            sb.append("JobToken: ").append(getJobToken()).append(",");
        if (getOutputS3Bucket() != null)
            sb.append("OutputS3Bucket: ").append(getOutputS3Bucket()).append(",");
        if (getOutputS3KeyPrefix() != null)
            sb.append("OutputS3KeyPrefix: ").append(getOutputS3KeyPrefix()).append(",");
        if (getShots() != null)
            sb.append("Shots: ").append(getShots()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CreateQuantumTaskRequest == false)
            return false;
        CreateQuantumTaskRequest other = (CreateQuantumTaskRequest) obj;
        if (other.getAction() == null ^ this.getAction() == null)
            return false;
        if (other.getAction() != null && other.getAction().equals(this.getAction()) == false)
            return false;
        if (other.getClientToken() == null ^ this.getClientToken() == null)
            return false;
        if (other.getClientToken() != null && other.getClientToken().equals(this.getClientToken()) == false)
            return false;
        if (other.getDeviceArn() == null ^ this.getDeviceArn() == null)
            return false;
        if (other.getDeviceArn() != null && other.getDeviceArn().equals(this.getDeviceArn()) == false)
            return false;
        if (other.getDeviceParameters() == null ^ this.getDeviceParameters() == null)
            return false;
        if (other.getDeviceParameters() != null && other.getDeviceParameters().equals(this.getDeviceParameters()) == false)
            return false;
        if (other.getJobToken() == null ^ this.getJobToken() == null)
            return false;
        if (other.getJobToken() != null && other.getJobToken().equals(this.getJobToken()) == false)
            return false;
        if (other.getOutputS3Bucket() == null ^ this.getOutputS3Bucket() == null)
            return false;
        if (other.getOutputS3Bucket() != null && other.getOutputS3Bucket().equals(this.getOutputS3Bucket()) == false)
            return false;
        if (other.getOutputS3KeyPrefix() == null ^ this.getOutputS3KeyPrefix() == null)
            return false;
        if (other.getOutputS3KeyPrefix() != null && other.getOutputS3KeyPrefix().equals(this.getOutputS3KeyPrefix()) == false)
            return false;
        if (other.getShots() == null ^ this.getShots() == null)
            return false;
        if (other.getShots() != null && other.getShots().equals(this.getShots()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getAction() == null) ? 0 : getAction().hashCode());
        hashCode = prime * hashCode + ((getClientToken() == null) ? 0 : getClientToken().hashCode());
        hashCode = prime * hashCode + ((getDeviceArn() == null) ? 0 : getDeviceArn().hashCode());
        hashCode = prime * hashCode + ((getDeviceParameters() == null) ? 0 : getDeviceParameters().hashCode());
        hashCode = prime * hashCode + ((getJobToken() == null) ? 0 : getJobToken().hashCode());
        hashCode = prime * hashCode + ((getOutputS3Bucket() == null) ? 0 : getOutputS3Bucket().hashCode());
        hashCode = prime * hashCode + ((getOutputS3KeyPrefix() == null) ? 0 : getOutputS3KeyPrefix().hashCode());
        hashCode = prime * hashCode + ((getShots() == null) ? 0 : getShots().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public CreateQuantumTaskRequest clone() {
        return (CreateQuantumTaskRequest) super.clone();
    }

}
/******************************************************************************* * Copyright 2016 Antoine Nicolas SAMAHA * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package com.foc.dataSource.servlet; import java.io.IOException; import java.io.PrintWriter; import java.lang.reflect.Constructor; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.foc.Globals; import com.foc.SrvConst_ServerSide; import com.foc.admin.FocLoginAccess; import com.foc.util.Encryptor; import com.foc.vaadin.FocWebApplication; import com.foc.web.server.FocWebServer; import com.foc.web.server.session.FocWebSession; @SuppressWarnings("serial") public class FocLinkServlet extends HttpServlet implements SrvConst_ServerSide { public void doGet (HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html"); response.setHeader("Pragma", "No-cache"); response.setDateHeader("Expires", 0); response.setHeader("Cache-Control", "no-cache"); PrintWriter out = response.getWriter(); out.println("<html>"); out.println("<head><title>Everpro Servlet</title></head>"); out.println("<body>"); out.println("<h1>Everpro Servlet</h1>"); out.println ("<p> Please post a request to this servlet in order to get a valid response."); 
out.println("</body></html>"); out.flush(); } public SessionAndApplication doPost_Init(HttpServletRequest request, HttpServletResponse response) throws IOException { String requestSessionID = request.getSession().getId(); Globals.logString("SESSION_ID = " + requestSessionID); // Using the Basic authorization HTTP protocol creates an // IllegalArgumentException in Java. Known bug. Apparently fixed in JDK7 // String auth = request.getHeader("Authorization"); //FocWebServer webServer = FocWebServer.connect(request.getSession().getServletContext()); // FocThreadLocal.setWebServer(webServer);//This line is important when the // WebServer already exists. In the second call or if the WebServer is // launched by the GUI FocWebApplication webApplication = null; FocWebSession webSession = null; int status = com.foc.Application.LOGIN_WRONG; { webApplication = FocWebServer.findWebApplicationBySessionID(requestSessionID, request.getSession().getServletContext()); if (webApplication == null) { Globals.logString("EverproLinkServlet is creating a new FocWebApplication(UI)"); try { Class cls = Class.forName("siren.isf.fenix.main.FenixUI"); Class[] param = new Class[0]; Constructor constr = cls.getConstructor(param); Object[] argsNew = new Object[0]; webApplication = (FocWebApplication) constr.newInstance(argsNew); FocWebApplication.setInstanceForThread(webApplication); webApplication.initialize(null, request.getServletContext(), request.getSession(), false); webApplication.setData(FocWebServer.getInstance()); } catch (Exception e) { Globals.logException(e); } } if (webApplication != null) { webSession = webApplication.getFocWebSession(); } // --------------------------------------------- // If the FocWebSession is not found, create a new FocWebApplication and a // new FocWebSession. 
if (webSession == null) { Globals.logString("EverproLinkServlet is creating a new FocWebSession and adding the FocWebApplication(UI) to the webServer"); webApplication.setFocWebSession(request.getSession(), new FocWebSession(request.getSession())); FocWebServer.getInstance().addApplication(webApplication); webSession = FocWebServer.findWebSessionBySessionID(requestSessionID, FocWebServer.getInstance()); } // --------------------------------------------- // If the FocWebSession has no user, try to log in by reading the user // name and password from the HTTP request header. if (webSession != null && webSession.getFocUser() == null) { String username = request.getHeader("username"); String password = request.getHeader("password"); if (username == null) { username = (String) request.getAttribute(HEADER_KEY_USERNAME); } if (password == null) { password = (String) request.getAttribute(HEADER_KEY_PASSWORD); } Globals.logString(username); Globals.logString(password); String encryptedPassword = Encryptor.encrypt_MD5(String.valueOf(password)); FocLoginAccess loginAccess = new FocLoginAccess(); status = loginAccess.checkUserPassword(username, encryptedPassword, false); if (status == com.foc.Application.LOGIN_VALID) { // webSession = newApplication.getFocWebSession(); webSession.setFocUser(loginAccess.getUser()); } if (status == com.foc.Application.LOGIN_WRONG) { Globals.logString("Error: Login credentials are incorrect."); // PrintWriter printWriter = response.getWriter(); // printWriter.println("Error: Login credentials are incorrect."); } } } SessionAndApplication session = new SessionAndApplication(webSession, webApplication, status); return session; } public class SessionAndApplication { private FocWebSession webSession = null; private FocWebApplication webApplication = null; private int status = com.foc.Application.LOGIN_WRONG; public SessionAndApplication(FocWebSession webSession, FocWebApplication webApplication, int status){ this.webSession = webSession; 
this.webApplication = webApplication; this.status = status; } public void dispose(){ webSession = null; webApplication = null; } public FocWebSession getWebSession() { return webSession; } public void setWebSession(FocWebSession webSession) { this.webSession = webSession; } public FocWebApplication getWebApplication() { return webApplication; } public void setWebApplication(FocWebApplication webApplication) { this.webApplication = webApplication; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } } public SessionAndApplication doPost_Init2(HttpServletRequest request, HttpServletResponse response) throws IOException { String requestSessionID = request.getSession().getId(); Globals.logString("SESSION_ID = " + requestSessionID); // Using the Basic authorization HTTP protocol creates an // IllegalArgumentException in Java. Known bug. Apparently fixed in JDK7 // String auth = request.getHeader("Authorization"); FocWebServer webServer = FocWebServer.connect(request.getSession().getServletContext(), false); // FocThreadLocal.setWebServer(webServer);//This line is important when the // WebServer already exists. 
In the second call or if the WebServer is // launched by the GUI FocWebApplication webApplication = null; FocWebSession webSession = null; int status = com.foc.Application.LOGIN_WRONG; if (webServer == null) { Globals.logString("Could not connect to the WebServer webServer = null"); } else { webApplication = FocWebServer.findWebApplicationBySessionID(requestSessionID, webServer); if (webApplication == null) { Globals.logString("EverproLinkServlet is creating a new FocWebApplication(UI)"); try { Class cls = Class.forName("b01.everpro.custom.application.CustomEverproWebApplication"); Class[] param = new Class[0]; Constructor constr = cls.getConstructor(param); Object[] argsNew = new Object[0]; webApplication = (FocWebApplication) constr.newInstance(argsNew); FocWebApplication.setInstanceForThread(webApplication); webApplication.setData(webServer); } catch (Exception e) { Globals.logException(e); } } if (webApplication != null) { webSession = webApplication.getFocWebSession(); } // --------------------------------------------- // If the FocWebSession is not found, create a new FocWebApplication and a // new FocWebSession. if (webSession == null) { Globals.logString("EverproLinkServlet is creating a new FocWebSession and adding the FocWebApplication(UI) to the webServer"); webApplication.setFocWebSession(request.getSession(), new FocWebSession(request.getSession())); webServer.addApplication(webApplication); webSession = FocWebServer.findWebSessionBySessionID(requestSessionID, webServer); } // --------------------------------------------- // If the FocWebSession has no user, try to log in by reading the user // name and password from the HTTP request header. 
if (webSession != null && webSession.getFocUser() == null) { String username = request.getHeader("username"); String password = request.getHeader("password"); if (username == null) { username = (String) request.getAttribute(HEADER_KEY_USERNAME); } if (password == null) { password = (String) request.getAttribute(HEADER_KEY_PASSWORD); } Globals.logString(username); Globals.logString(password); String encryptedPassword = Encryptor.encrypt_MD5(String.valueOf(password)); FocLoginAccess loginAccess = new FocLoginAccess(); status = loginAccess.checkUserPassword(username, encryptedPassword, false); if (status == com.foc.Application.LOGIN_VALID) { // webSession = newApplication.getFocWebSession(); webSession.setFocUser(loginAccess.getUser()); } if (status == com.foc.Application.LOGIN_WRONG) { Globals.logString("Error: Login credentials are incorrect."); // PrintWriter printWriter = response.getWriter(); // printWriter.println("Error: Login credentials are incorrect."); } } } SessionAndApplication session = new SessionAndApplication(webSession, webApplication, status); return session; } }
package jelectrum; import java.util.HashMap; import java.util.HashSet; import java.util.Set; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import java.util.Collection; import java.util.List; import java.util.ArrayList; import com.google.bitcoin.core.Sha256Hash; import com.google.bitcoin.core.Transaction; import com.google.bitcoin.core.StoredBlock; import com.google.bitcoin.core.Block; import org.json.JSONObject; import org.json.JSONArray; /** * Why is the logic of preparing results for clients * mixed between here and StratumConnection? This needs to be refactored. */ public class ElectrumNotifier { Map<String, Subscriber> block_subscribers; Map<String, Subscriber> blocknum_subscribers; Map<String, Map<String, Subscriber> > address_subscribers; LRUCache<String, String> address_sums; Jelectrum jelly; StoredBlock chain_head; Object chain_head_lock= new Object(); public ElectrumNotifier(Jelectrum jelly) { this.jelly = jelly; block_subscribers = new HashMap<String, Subscriber>(512, 0.5f); blocknum_subscribers = new HashMap<String, Subscriber>(512, 0.5f); address_subscribers = new HashMap<String, Map<String, Subscriber> >(512, 0.5f); address_sums = new LRUCache<String, String>(10000); } public void start() throws com.google.bitcoin.store.BlockStoreException { chain_head = jelly.getBlockStore().getChainHead(); new PruneThread().start(); } public int getHeadHeight() { return chain_head.getHeight(); } public void registerBlockchainHeaders(StratumConnection conn, Object request_id, boolean send_initial) { Subscriber sub = new Subscriber(conn, request_id); synchronized(block_subscribers) { String conn_id = conn.getId(); block_subscribers.put(conn_id, sub); } if (send_initial) { StoredBlock blk = chain_head; try { JSONObject reply = sub.startReply(); JSONObject block_data = new JSONObject(); populateBlockData(blk, block_data); reply.put("result", block_data); sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } 
} } public void registerBlockCount(StratumConnection conn, Object request_id, boolean send_initial) { Subscriber sub = new Subscriber(conn, request_id); synchronized(blocknum_subscribers) { String conn_id = conn.getId(); blocknum_subscribers.put(conn_id, sub); } if (send_initial) { StoredBlock blk = chain_head; try { JSONObject reply = sub.startReply(); reply.put("result", blk.getHeight()); sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } } } public void notifyNewBlock(Block b) { if (chain_head == null) return; StoredBlock blk = null; try { blk = jelly.getBlockStore().get(b.getHash()); } catch(com.google.bitcoin.store.BlockStoreException e) { throw new RuntimeException(e); } synchronized(chain_head_lock) { if (blk.getHeight() > chain_head.getHeight()) { chain_head = blk; } } synchronized(block_subscribers) { for(Subscriber sub : block_subscribers.values()) { blockNotify(sub, blk); } } synchronized(blocknum_subscribers) { for(Subscriber sub : blocknum_subscribers.values()) { blockNumNotify(sub, blk); } } } public void notifyNewTransaction(Transaction tx, Collection<String> addresses, int height) { synchronized(address_sums) { for(String s : addresses) { address_sums.remove(s); } } //Inside a sync do a deep copy of just the entries that we need Map<String, Map<String, Subscriber> > address_subscribers_copy = new HashMap<String, Map<String, Subscriber>>(); synchronized(address_subscribers) { for(String s : addresses) { Map<String, Subscriber> m = address_subscribers.get(s); if (m != null) { TreeMap<String, Subscriber> copy = new TreeMap<String, Subscriber>(); copy.putAll(m); address_subscribers_copy.put(s, m); } } } //Now with our clean copy we can do the notifications without holding any locks try { for(String s : addresses) { Map<String, Subscriber> m = address_subscribers_copy.get(s); if ((m != null) && (m.size() > 0)) { String sum = getAddressChecksum(s); JSONObject reply = new JSONObject(); JSONArray info = new 
JSONArray(); info.put(s); info.put(sum); reply.put("params", info); reply.put("id", JSONObject.NULL); reply.put("method", "blockchain.address.subscribe"); for(Subscriber sub : m.values()) { sub.sendReply(reply); } } } } catch(org.json.JSONException e) { throw new RuntimeException(e); } } private void blockNotify(Subscriber sub, StoredBlock blk) { try { JSONObject reply = new JSONObject(); JSONObject block_data = new JSONObject(); populateBlockData(blk, block_data); JSONArray crap = new JSONArray(); crap.put(block_data); reply.put("params", crap); reply.put("id", JSONObject.NULL); reply.put("method", "blockchain.headers.subscribe"); sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } } private void blockNumNotify(Subscriber sub, StoredBlock blk) { try { JSONObject reply = new JSONObject(); JSONArray crap = new JSONArray(); crap.put(blk.getHeight()); reply.put("params", crap); reply.put("id", JSONObject.NULL); reply.put("method", "blockchain.numblocks.subscribe"); sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } } public void populateBlockData(StoredBlock blk, JSONObject block_data) throws org.json.JSONException { Block header = blk.getHeader(); block_data.put("nonce", header.getNonce()); block_data.put("prev_block_hash", header.getPrevBlockHash().toString()); block_data.put("timestamp", header.getTimeSeconds()); block_data.put("merkle_root", header.getMerkleRoot().toString()); block_data.put("block_height", blk.getHeight()); block_data.put("version",header.getVersion()); block_data.put("bits", header.getDifficultyTarget()); block_data.put("utxo_root", jelly.getUtxoTrieMgr().getRootHash()); } public void registerBlockchainAddress(StratumConnection conn, Object request_id, boolean send_initial, String address) { Subscriber sub = new Subscriber(conn, request_id); synchronized(address_subscribers) { if (address_subscribers.get(address) == null) { address_subscribers.put(address, new 
TreeMap<String, Subscriber>()); } address_subscribers.get(address).put(conn.getId(), sub); } if (send_initial) { try { JSONObject reply = sub.startReply(); String sum = getAddressChecksum(address); if (sum==null) { reply.put("result", JSONObject.NULL); } else { reply.put("result", sum); } sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } } } public void sendAddressHistory(StratumConnection conn, Object request_id, String address) { Subscriber sub = new Subscriber(conn, request_id); try { JSONObject reply = sub.startReply(); reply.put("result", getAddressHistory(address)); sub.sendReply(reply); } catch(org.json.JSONException e) { throw new RuntimeException(e); } } public Object getAddressHistory(String address) { try { List<SortedTransaction> lst = getTransactionsForAddress(address); if (lst.size() > 0) { JSONArray arr =new JSONArray(); for(SortedTransaction ts : lst) { JSONObject o = new JSONObject(); o.put("tx_hash", ts.tx.getHash().toString()); if (ts.block != null) { o.put("height", ts.height); } else { o.put("height", 0); } arr.put(o); } return arr; } else { return JSONObject.NULL; } } catch(org.json.JSONException e) { throw new RuntimeException(e); } } public String getAddressChecksum(String address) { synchronized(address_sums) { if (address_sums.containsKey(address)) { return address_sums.get(address); } } String hash = null; List<SortedTransaction> lst = getTransactionsForAddress(address); if (lst.size() > 0) { StringBuilder sb = new StringBuilder(); for(SortedTransaction ts : lst) { sb.append(ts.tx.getHash()); sb.append(':'); if (ts.block != null) { sb.append(ts.height); } else { sb.append("0"); } sb.append(':'); } hash = Util.SHA256(sb.toString()); } synchronized(address_sums) { address_sums.put(address,hash); } return hash; } public class PruneThread extends Thread { public PruneThread() { setName("ElectrumNotifier/PruneThread"); setDaemon(true); } public void run() { while(true) { 
try{Thread.sleep(60000);}catch(Exception e){} TreeSet<String> to_delete =new TreeSet<String>(); synchronized(block_subscribers) { for(Subscriber sub : block_subscribers.values()) { if (!sub.isOpen()) { to_delete.add(sub.getId()); } } for(String id : to_delete) { block_subscribers.remove(id); } } //TODO - finish this monster /*synchronized(address_subscribers) { for(String address : address_subscribers.keySet()) { } }*/ } } } public List<SortedTransaction> getTransactionsForAddress(String address) { Set<Sha256Hash> tx_list = jelly.getDB().getAddressToTxSet(address); ArrayList<SortedTransaction> out = new ArrayList<SortedTransaction>(); if (tx_list != null) { TreeSet<SortedTransaction> set = new TreeSet<SortedTransaction>(); for(Sha256Hash tx_hash : tx_list) { SortedTransaction stx = new SortedTransaction(tx_hash); if (stx.isValid()) { set.add(stx); } } for(SortedTransaction s : set) { out.add(s); } } return out; } public class Subscriber { private StratumConnection conn; private Object request_id; public Subscriber(StratumConnection conn, Object request_id) { this.conn = conn; this.request_id = request_id; } public JSONObject startReply() throws org.json.JSONException { JSONObject reply = new JSONObject(); reply.put("id", request_id); return reply; } public void sendReply(JSONObject o) { conn.sendMessage(o); } public boolean isOpen() { return conn.isOpen(); } public String getId() { return conn.getId(); } } public class SortedTransaction implements Comparable<SortedTransaction> { SerializedTransaction s_tx; Transaction tx; StoredBlock block; int height; public SortedTransaction(Sha256Hash tx_hash) { this.s_tx = jelly.getDB().getTransactionMap().get(tx_hash); if (s_tx==null) return; this.tx = s_tx.getTx(jelly.getNetworkParameters()); Set<Sha256Hash> block_list = jelly.getDB().getTxToBlockMap(tx.getHash()); if (block_list != null) { for(Sha256Hash block_hash : block_list) { block = jelly.getDB().getBlockStoreMap().get(block_hash); height = block.getHeight(); } } } 
public int getEffectiveHeight() { if (block != null) return height; return Integer.MAX_VALUE; } public int compareTo(SortedTransaction o) { /*if (getEffectiveHeight() > o.getEffectiveHeight()) return -1; if (getEffectiveHeight() < o.getEffectiveHeight()) return 1;*/ return tx.getHash().toString().compareTo(o.tx.getHash().toString()); } public boolean isValid() { if (s_tx ==null) return false; if (block!=null) return true; if (s_tx.getSavedTime() + 86400L * 1000L > System.currentTimeMillis()) return true; return false; } } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.hive; import com.google.common.collect.ImmutableList; import com.google.common.util.concurrent.SettableFuture; import io.airlift.stats.CounterStat; import io.airlift.units.DataSize; import io.prestosql.spi.PrestoException; import io.prestosql.spi.connector.ConnectorSplit; import io.prestosql.spi.connector.ConnectorSplitSource; import org.testng.annotations.Test; import java.time.Instant; import java.util.List; import java.util.Optional; import java.util.OptionalInt; import java.util.Properties; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import static io.airlift.concurrent.MoreFutures.getFutureValue; import static io.airlift.testing.Assertions.assertContains; import static io.airlift.units.DataSize.Unit.MEGABYTE; import static io.prestosql.plugin.hive.HiveTestUtils.SESSION; import static io.prestosql.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED; import static java.lang.Math.toIntExact; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import static org.testng.Assert.fail; public class TestHiveSplitSource { @Test public void testOutstandingSplitCount() { HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce( SESSION, "database", "table", 10, 10, DataSize.of(1, MEGABYTE), Integer.MAX_VALUE, new TestingHiveSplitLoader(), Executors.newFixedThreadPool(5), new 
CounterStat()); // add 10 splits for (int i = 0; i < 10; i++) { hiveSplitSource.addToQueue(new TestSplit(i)); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1); } // remove 1 split assertEquals(getSplits(hiveSplitSource, 1).size(), 1); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 9); // remove 4 splits assertEquals(getSplits(hiveSplitSource, 4).size(), 4); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 5); // try to remove 20 splits, and verify we only got 5 assertEquals(getSplits(hiveSplitSource, 20).size(), 5); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 0); } @Test public void testFail() { HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce( SESSION, "database", "table", 10, 10, DataSize.of(1, MEGABYTE), Integer.MAX_VALUE, new TestingHiveSplitLoader(), Executors.newFixedThreadPool(5), new CounterStat()); // add some splits for (int i = 0; i < 5; i++) { hiveSplitSource.addToQueue(new TestSplit(i)); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1); } // remove a split and verify assertEquals(getSplits(hiveSplitSource, 1).size(), 1); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // fail source hiveSplitSource.fail(new RuntimeException("test")); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // try to remove a split and verify we got the expected exception try { getSplits(hiveSplitSource, 1); fail("expected RuntimeException"); } catch (RuntimeException e) { assertEquals(e.getMessage(), "test"); } assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + poison // attempt to add another split and verify it does not work hiveSplitSource.addToQueue(new TestSplit(99)); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + poison // fail source again hiveSplitSource.fail(new RuntimeException("another failure")); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), 4); // 3 splits + 
poison // try to remove a split and verify we got the first exception try { getSplits(hiveSplitSource, 1); fail("expected RuntimeException"); } catch (RuntimeException e) { assertEquals(e.getMessage(), "test"); } } @Test public void testReaderWaitsForSplits() throws Exception { HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce( SESSION, "database", "table", 10, 10, DataSize.of(1, MEGABYTE), Integer.MAX_VALUE, new TestingHiveSplitLoader(), Executors.newFixedThreadPool(5), new CounterStat()); SettableFuture<ConnectorSplit> splits = SettableFuture.create(); // create a thread that will get a split CountDownLatch started = new CountDownLatch(1); Thread getterThread = new Thread(() -> { try { started.countDown(); List<ConnectorSplit> batch = getSplits(hiveSplitSource, 1); assertEquals(batch.size(), 1); splits.set(batch.get(0)); } catch (Throwable e) { splits.setException(e); } }); getterThread.start(); try { // wait for the thread to be started assertTrue(started.await(1, TimeUnit.SECONDS)); // sleep for a bit, and assure the thread is blocked TimeUnit.MILLISECONDS.sleep(200); assertTrue(!splits.isDone()); // add a split hiveSplitSource.addToQueue(new TestSplit(33)); // wait for thread to get the split ConnectorSplit split = splits.get(800, TimeUnit.MILLISECONDS); assertEquals(((HiveSplit) split).getSchema().getProperty("id"), "33"); } finally { // make sure the thread exits getterThread.interrupt(); } } @Test public void testOutstandingSplitSize() { DataSize maxOutstandingSplitsSize = DataSize.of(1, MEGABYTE); HiveSplitSource hiveSplitSource = HiveSplitSource.allAtOnce( SESSION, "database", "table", 10, 10000, maxOutstandingSplitsSize, Integer.MAX_VALUE, new TestingHiveSplitLoader(), Executors.newFixedThreadPool(5), new CounterStat()); int testSplitSizeInBytes = new TestSplit(0).getEstimatedSizeInBytes(); int maxSplitCount = toIntExact(maxOutstandingSplitsSize.toBytes()) / testSplitSizeInBytes; for (int i = 0; i < maxSplitCount; i++) { 
hiveSplitSource.addToQueue(new TestSplit(i)); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1); } assertEquals(getSplits(hiveSplitSource, maxSplitCount).size(), maxSplitCount); for (int i = 0; i < maxSplitCount; i++) { hiveSplitSource.addToQueue(new TestSplit(i)); assertEquals(hiveSplitSource.getBufferedInternalSplitCount(), i + 1); } try { hiveSplitSource.addToQueue(new TestSplit(0)); fail("expect failure"); } catch (PrestoException e) { assertContains(e.getMessage(), "Split buffering for database.table exceeded memory limit"); } } @Test public void testEmptyBucket() { HiveSplitSource hiveSplitSource = HiveSplitSource.bucketed( SESSION, "database", "table", 10, 10, DataSize.of(1, MEGABYTE), Integer.MAX_VALUE, new TestingHiveSplitLoader(), Executors.newFixedThreadPool(5), new CounterStat()); hiveSplitSource.addToQueue(new TestSplit(0, OptionalInt.of(2))); hiveSplitSource.noMoreSplits(); assertEquals(getSplits(hiveSplitSource, OptionalInt.of(0), 10).size(), 0); assertEquals(getSplits(hiveSplitSource, OptionalInt.of(1), 10).size(), 0); assertEquals(getSplits(hiveSplitSource, OptionalInt.of(2), 10).size(), 1); assertEquals(getSplits(hiveSplitSource, OptionalInt.of(3), 10).size(), 0); } private static List<ConnectorSplit> getSplits(ConnectorSplitSource source, int maxSize) { return getSplits(source, OptionalInt.empty(), maxSize); } private static List<ConnectorSplit> getSplits(ConnectorSplitSource source, OptionalInt bucketNumber, int maxSize) { if (bucketNumber.isPresent()) { return getFutureValue(source.getNextBatch(new HivePartitionHandle(bucketNumber.getAsInt()), maxSize)).getSplits(); } else { return getFutureValue(source.getNextBatch(NOT_PARTITIONED, maxSize)).getSplits(); } } private static class TestingHiveSplitLoader implements HiveSplitLoader { @Override public void start(HiveSplitSource splitSource) { } @Override public void stop() { } } private static class TestSplit extends InternalHiveSplit { private TestSplit(int id) { this(id, 
OptionalInt.empty()); } private TestSplit(int id, OptionalInt bucketNumber) { super( "partition-name", "path", 0, 100, 100, Instant.now().toEpochMilli(), properties("id", String.valueOf(id)), ImmutableList.of(), ImmutableList.of(new InternalHiveBlock(0, 100, ImmutableList.of())), bucketNumber, true, false, TableToPartitionMapping.empty(), Optional.empty(), false, Optional.empty()); } private static Properties properties(String key, String value) { Properties properties = new Properties(); properties.setProperty(key, value); return properties; } } }
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ui;

import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.INativeFileType;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.vfs.PersistentFSConstants;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.pom.Navigatable;
import com.intellij.util.Alarm;
import com.intellij.util.OpenSourceUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;

/**
 * Wires "autoscroll to source" behavior onto a {@link JTree}, {@link JTable} or {@link JList}:
 * single clicks, drags and selection changes trigger (debounced) navigation to the selected
 * element's source via {@link #scrollToSource(Component)}. Subclasses supply the on/off state
 * through {@link #isAutoScrollMode()} / {@link #setAutoScrollMode(boolean)}.
 */
public abstract class AutoScrollToSourceHandler {
  // Debounces selection-change navigation; (re)created in each install(...) call,
  // so only one component should be installed per handler instance.
  private Alarm myAutoScrollAlarm;

  protected AutoScrollToSourceHandler() {
  }

  /** Installs click/drag/selection listeners on a tree. */
  public void install(final JTree tree) {
    myAutoScrollAlarm = new Alarm();
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        // single clicks only: double clicks are left for default expand/navigate handling
        if (clickCount > 1) return false;

        TreePath location = tree.getPathForLocation(e.getPoint().x, e.getPoint().y);
        if (location != null) {
          onMouseClicked(tree);
          // consume the click only when autoscroll is actually on
          return isAutoScrollMode();
        }
        return false;
      }
    }.installOn(tree);

    tree.addMouseMotionListener(new MouseMotionAdapter() {
      public void mouseDragged(final MouseEvent e) {
        onSelectionChanged(tree);
      }
    });
    tree.addTreeSelectionListener(
      new TreeSelectionListener() {
        public void valueChanged(TreeSelectionEvent e) {
          onSelectionChanged(tree);
        }
      }
    );
  }

  /** Installs click/drag/selection listeners on a table. */
  public void install(final JTable table) {
    myAutoScrollAlarm = new Alarm();
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        // same single-click-only rule as the tree variant (">= 2" is equivalent to "> 1")
        if (clickCount >= 2) return false;

        Component location = table.getComponentAt(e.getPoint());
        if (location != null) {
          onMouseClicked(table);
          return isAutoScrollMode();
        }
        return false;
      }
    }.installOn(table);

    table.addMouseMotionListener(new MouseMotionAdapter() {
      public void mouseDragged(final MouseEvent e) {
        onSelectionChanged(table);
      }
    });
    table.getSelectionModel().addListSelectionListener(
      new ListSelectionListener() {
        @Override
        public void valueChanged(ListSelectionEvent e) {
          onSelectionChanged(table);
        }
      }
    );
  }

  /** Installs click/selection listeners on a list (no drag listener, unlike the tree/table variants). */
  public void install(final JList jList) {
    myAutoScrollAlarm = new Alarm();
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        if (clickCount >= 2) return false;
        final Object source = e.getSource();
        // translate the click into list coordinates before hit-testing
        final int index = jList.locationToIndex(SwingUtilities.convertPoint(source instanceof Component ? (Component)source : null, e.getPoint(), jList));
        if (index >= 0 && index < jList.getModel().getSize()) {
          onMouseClicked(jList);
          // NOTE(review): returns true unconditionally, whereas the tree/table variants
          // return isAutoScrollMode() — confirm whether this asymmetry is intentional
          return true;
        }
        return false;
      }
    }.installOn(jList);

    jList.addListSelectionListener(new ListSelectionListener() {
      public void valueChanged(ListSelectionEvent e) {
        onSelectionChanged(jList);
      }
    });
  }

  /** Cancels any pending (debounced) autoscroll request. */
  public void cancelAllRequests(){
    if (myAutoScrollAlarm != null) {
      myAutoScrollAlarm.cancelAllRequests();
    }
  }

  /** Navigates immediately (no debounce) on an explicit click, if autoscroll is enabled. */
  public void onMouseClicked(final Component component) {
    cancelAllRequests();
    if (isAutoScrollMode()){
      ApplicationManager.getApplication().invokeLater(() -> scrollToSource(component));
    }
  }

  /** Schedules a debounced navigation (500 ms) after a selection change. */
  private void onSelectionChanged(final Component component) {
    if (component != null && !component.isShowing()) return;

    if (!isAutoScrollMode()) {
      return;
    }
    if (needToCheckFocus() && !component.hasFocus()) {
      return;
    }

    // restart the debounce window on each change so rapid selection moves coalesce
    myAutoScrollAlarm.cancelAllRequests();
    myAutoScrollAlarm.addRequest(
      () -> {
        if (component.isShowing()) { //for tests
          scrollToSource(component);
        }
      }, 500
    );
  }

  /** When true (default), selection changes in an unfocused component are ignored. */
  protected boolean needToCheckFocus(){
    return true;
  }

  /** @return whether autoscroll-to-source is currently enabled. */
  protected abstract boolean isAutoScrollMode();

  /** Persists the autoscroll-to-source toggle state. */
  protected abstract void setAutoScrollMode(boolean state);

  /**
   * Resolves the navigatable under the component's current selection and opens it
   * (non-modally, without requesting focus transfer beyond the standard navigate call).
   * Skips unknown/native file types and files over the IntelliSense size limit.
   */
  protected void scrollToSource(final Component tree) {
    DataContext dataContext=DataManager.getInstance().getDataContext(tree);
    getReady(dataContext).doWhenDone(() -> TransactionGuard.submitTransaction(ApplicationManager.getApplication(), () -> {
      DataContext context = DataManager.getInstance().getDataContext(tree);
      final VirtualFile vFile = CommonDataKeys.VIRTUAL_FILE.getData(context);
      if (vFile != null) {
        // Attempt to navigate to the virtual file with unknown file type will show a modal dialog
        // asking to register some file type for this file. This behaviour is undesirable when autoscrolling.
        if (vFile.getFileType() == FileTypes.UNKNOWN || vFile.getFileType() instanceof INativeFileType) return;

        //IDEA-84881 Don't autoscroll to very large files
        if (vFile.getLength() > PersistentFSConstants.getMaxIntellisenseFileSize()) return;
      }
      Navigatable[] navigatables = CommonDataKeys.NAVIGATABLE_ARRAY.getData(context);
      if (navigatables != null) {
        // multi-selection: do not autoscroll at all (ambiguous target)
        if (navigatables.length > 1) {
          return;
        }
        for (Navigatable navigatable : navigatables) {
          // we are not going to open modal dialog during autoscrolling
          if (!navigatable.canNavigateToSource()) return;
        }
      }
      OpenSourceUtil.navigate(false, true, navigatables);
    }));
  }

  /** @return a toolbar toggle action bound to the autoscroll state. */
  @NotNull
  public ToggleAction createToggleAction() {
    return new AutoscrollToSourceAction();
  }

  /** Toolbar toggle that reads/writes the handler's autoscroll mode; safe during indexing (DumbAware). */
  private class AutoscrollToSourceAction extends ToggleAction implements DumbAware {
    public AutoscrollToSourceAction() {
      super(UIBundle.message("autoscroll.to.source.action.name"), UIBundle.message("autoscroll.to.source.action.description"),
            AllIcons.General.AutoscrollToSource);
    }

    public boolean isSelected(AnActionEvent event) {
      return isAutoScrollMode();
    }

    public void setSelected(AnActionEvent event, boolean flag) {
      setAutoScrollMode(flag);
    }
  }

  /** Defers navigation until the containing tool window (if any) is ready. */
  private ActionCallback getReady(DataContext context) {
    ToolWindow toolWindow = PlatformDataKeys.TOOL_WINDOW.getData(context);
    return toolWindow != null ? toolWindow.getReady(this) : ActionCallback.DONE;
  }
}
/* * JBoss, Home of Professional Open Source. * Copyright 2012 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.coyote.http11; import java.util.HashMap; import java.util.Iterator; import javax.management.MBeanRegistration; import javax.management.MBeanServer; import javax.management.ObjectName; import javax.net.ssl.SSLContext; import org.apache.coyote.Adapter; import org.apache.coyote.ProtocolHandler; import org.apache.tomcat.util.net.SSLImplementation; /** * {@code Http11AbstractProtocol} * <p> * Abstract the protocol implementation, including threading, etc. Processor is * single threaded and specific to stream-based protocols, will not fit Jk * protocols like JNI. * </p> * Created on Dec 19, 2011 at 11:58:19 AM * * @author <a href="mailto:nbenothm@redhat.com">Nabil Benothman</a> */ public abstract class Http11AbstractProtocol implements ProtocolHandler, MBeanRegistration { protected HashMap<String, Object> attributes = new HashMap<String, Object>(); // * protected ObjectName tpOname = null; // * protected ObjectName rgOname = null; protected SSLImplementation sslImplementation = null; /** * The adapter, used to call the connector. */ protected Adapter adapter; /** * Processor cache. 
*/ protected int processorCache = -1; protected boolean canDestroy = false; protected int socketBuffer = 9000; /** * Maximum size of the post which will be saved when processing certain * requests, such as a POST. */ protected int maxSavePostSize = 4 * 1024; // --------------- HTTP --------------- /** * Maximum size of the HTTP message header. */ protected int maxHttpHeaderSize = Integer.valueOf(System.getProperty("org.apache.coyote.http11.Http11Protocol.MAX_HEADER_SIZE", "8192")).intValue(); /** * If true, the regular socket timeout will be used for the full duration of * the connection. */ protected boolean disableUploadTimeout = true; /** * Integrated compression support. */ protected String compression = System.getProperty("org.apache.coyote.http11.Http11Protocol.COMPRESSION", "off"); protected String noCompressionUserAgents = System.getProperty("org.apache.coyote.http11.Http11Protocol.COMPRESSION_RESTRICTED_UA"); protected String compressableMimeTypes = System.getProperty("org.apache.coyote.http11.Http11Protocol.COMPRESSION_MIME_TYPES", "text/html,text/xml,text/plain"); protected int compressionMinSize = Integer.valueOf(System.getProperty("org.apache.coyote.http11.Http11Protocol.COMPRESSION_MIN_SIZE", "2048")).intValue(); protected String protocol = null; /** * User agents regular expressions which should be restricted to HTTP/1.0 * support. */ protected String restrictedUserAgents = null; /** * Maximum number of requests which can be performed over a keepalive * connection. The default is the same as for Apache HTTP Server. */ protected int maxKeepAliveRequests = Integer.valueOf(System.getProperty("org.apache.coyote.http11.Http11Protocol.MAX_KEEP_ALIVE_REQUESTS", "-1")).intValue(); protected String domain; protected ObjectName oname; protected MBeanServer mserver; /** * Server header. 
*/ protected String server = System.getProperty("org.apache.coyote.http11.Http11Protocol.SERVER"); /** * This timeout represents the socket timeout which will be used while the * adapter execution is in progress, unless disableUploadTimeout is set to * true. The default is the same as for Apache HTTP Server (300 000 * milliseconds). */ protected int timeout = 300000; /** * This field indicates if the protocol is secure from the perspective of * the client (= https is used). */ protected boolean secure; /** * Create a new instance of {@code Http11AbstractProtocol} */ public Http11AbstractProtocol() { super(); } /** * @return the name of the protocol */ public abstract String getName(); /* * (non-Javadoc) * * @see org.apache.coyote.ProtocolHandler#setAttribute(java.lang.String, * java.lang.Object) */ @Override public void setAttribute(String name, Object value) { attributes.put(name, value); } /* * (non-Javadoc) * * @see org.apache.coyote.ProtocolHandler#getAttribute(java.lang.String) */ @Override public Object getAttribute(String key) { return attributes.get(key); } /* * (non-Javadoc) * * @see org.apache.coyote.ProtocolHandler#getAttributeNames() */ @Override public Iterator<String> getAttributeNames() { return attributes.keySet().iterator(); } /* * (non-Javadoc) * * @see * org.apache.coyote.ProtocolHandler#setAdapter(org.apache.coyote.Adapter) */ @Override public void setAdapter(Adapter adapter) { this.adapter = adapter; } /* * (non-Javadoc) * * @see org.apache.coyote.ProtocolHandler#getAdapter() */ @Override public Adapter getAdapter() { return adapter; } /* * (non-Javadoc) * * @see org.apache.coyote.ProtocolHandler#hasIoEvents() */ @Override public boolean hasIoEvents() { return true; } /* * (non-Javadoc) * * @see * javax.management.MBeanRegistration#preRegister(javax.management.MBeanServer * , javax.management.ObjectName) */ @Override public ObjectName preRegister(MBeanServer server, ObjectName name) throws Exception { oname = name; mserver = server; domain = 
name.getDomain(); return name; } /* * (non-Javadoc) * * @see javax.management.MBeanRegistration#postRegister(java.lang.Boolean) */ @Override public void postRegister(Boolean registrationDone) { } /* * (non-Javadoc) * * @see javax.management.MBeanRegistration#preDeregister() */ @Override public void preDeregister() throws Exception { } /* * (non-Javadoc) * * @see javax.management.MBeanRegistration#postDeregister() */ @Override public void postDeregister() { } /** * Getter for attributes * * @return the attributes */ public HashMap<String, Object> getAttributes() { return this.attributes; } /** * Setter for the attributes * * @param attributes * the attributes to set */ public void setAttributes(HashMap<String, Object> attributes) { this.attributes = attributes; } /** * Getter for tpOname * * @return the tpOname */ public ObjectName getTpOname() { return this.tpOname; } /** * Setter for the tpOname * * @param tpOname * the tpOname to set */ public void setTpOname(ObjectName tpOname) { this.tpOname = tpOname; } /** * Getter for rgOname * * @return the rgOname */ public ObjectName getRgOname() { return this.rgOname; } /** * Setter for the rgOname * * @param rgOname * the rgOname to set */ public void setRgOname(ObjectName rgOname) { this.rgOname = rgOname; } /** * Getter for processorCache * * @return the processorCache */ public int getProcessorCache() { return this.processorCache; } /** * Setter for the processorCache * * @param processorCache * the processorCache to set */ public void setProcessorCache(int processorCache) { this.processorCache = processorCache; } /** * Getter for canDestroy * * @return the canDestroy */ public boolean getCanDestroy() { return this.canDestroy; } /** * Setter for the canDestroy * * @param canDestroy * the canDestroy to set */ public void setCanDestroy(boolean canDestroy) { this.canDestroy = canDestroy; } /** * Getter for socketBuffer * * @return the socketBuffer */ public int getSocketBuffer() { return this.socketBuffer; } /** * 
Setter for the socketBuffer * * @param socketBuffer * the socketBuffer to set */ public void setSocketBuffer(int socketBuffer) { this.socketBuffer = socketBuffer; } /** * Getter for maxSavePostSize * * @return the maxSavePostSize */ public int getMaxSavePostSize() { return this.maxSavePostSize; } /** * Setter for the maxSavePostSize * * @param maxSavePostSize * the maxSavePostSize to set */ public void setMaxSavePostSize(int maxSavePostSize) { this.maxSavePostSize = maxSavePostSize; } /** * Getter for maxHttpHeaderSize * * @return the maxHttpHeaderSize */ public int getMaxHttpHeaderSize() { return this.maxHttpHeaderSize; } /** * Setter for the maxHttpHeaderSize * * @param maxHttpHeaderSize * the maxHttpHeaderSize to set */ public void setMaxHttpHeaderSize(int maxHttpHeaderSize) { this.maxHttpHeaderSize = maxHttpHeaderSize; } /** * Getter for disableUploadTimeout * * @return the disableUploadTimeout */ public boolean getDisableUploadTimeout() { return this.disableUploadTimeout; } /** * Setter for the disableUploadTimeout * * @param disableUploadTimeout * the disableUploadTimeout to set */ public void setDisableUploadTimeout(boolean disableUploadTimeout) { this.disableUploadTimeout = disableUploadTimeout; } /** * Getter for compression * * @return the compression */ public String getCompression() { return this.compression; } /** * Setter for the compression * * @param compression * the compression to set */ public void setCompression(String compression) { this.compression = compression; } /** * Getter for noCompressionUserAgents * * @return the noCompressionUserAgents */ public String getNoCompressionUserAgents() { return this.noCompressionUserAgents; } /** * Setter for the noCompressionUserAgents * * @param noCompressionUserAgents * the noCompressionUserAgents to set */ public void setNoCompressionUserAgents(String noCompressionUserAgents) { this.noCompressionUserAgents = noCompressionUserAgents; } /** * Getter for compressableMimeTypes * * @return the 
compressableMimeTypes */ public String getCompressableMimeType() { return this.compressableMimeTypes; } /** * Setter for the compressableMimeTypes * * @param compressableMimeTypes * the compressableMimeTypes to set */ public void setCompressableMimeType(String compressableMimeTypes) { this.compressableMimeTypes = compressableMimeTypes; } /** * Getter for compressionMinSize * * @return the compressionMinSize */ public int getCompressionMinSize() { return this.compressionMinSize; } /** * Setter for the compressionMinSize * * @param compressionMinSize * the compressionMinSize to set */ public void setCompressionMinSize(int compressionMinSize) { this.compressionMinSize = compressionMinSize; } /** * Getter for protocol * * @return the protocol */ public String getProtocol() { return this.protocol; } /** * Setter for the protocol * * @param protocol * the protocol to set */ public void setProtocol(String protocol) { this.protocol = protocol; } /** * Getter for restrictedUserAgents * * @return the restrictedUserAgents */ public String getRestrictedUserAgents() { return this.restrictedUserAgents; } /** * Setter for the restrictedUserAgents * * @param restrictedUserAgents * the restrictedUserAgents to set */ public void setRestrictedUserAgents(String restrictedUserAgents) { this.restrictedUserAgents = restrictedUserAgents; } /** * Getter for maxKeepAliveRequests * * @return the maxKeepAliveRequests */ public int getMaxKeepAliveRequests() { return this.maxKeepAliveRequests; } /** * Setter for the maxKeepAliveRequests * * @param maxKeepAliveRequests * the maxKeepAliveRequests to set */ public void setMaxKeepAliveRequests(int maxKeepAliveRequests) { this.maxKeepAliveRequests = maxKeepAliveRequests; } /** * Getter for domain * * @return the domain */ public String getDomain() { return this.domain; } /** * Setter for the domain * * @param domain * the domain to set */ public void setDomain(String domain) { this.domain = domain; } /** * Getter for oname * * @return the oname */ 
public ObjectName getObjectName() { return this.oname; } /** * Setter for the oname * * @param oname * the oname to set */ public void setObjectName(ObjectName oname) { this.oname = oname; } /** * Getter for mserver * * @return the mserver */ public MBeanServer getMserver() { return this.mserver; } /** * Setter for the mserver * * @param mserver * the mserver to set */ public void setMserver(MBeanServer mserver) { this.mserver = mserver; } /** * Getter for server * * @return the server */ public String getServer() { return this.server; } /** * Setter for the server * * @param server * the server to set */ public void setServer(String server) { this.server = server; } /** * Getter for timeout * * @return the timeout */ public int getTimeout() { return this.timeout; } /** * Setter for the timeout * * @param timeout * the timeout to set */ public void setTimeout(int timeout) { this.timeout = timeout; } /** * Getter for secure * * @return the secure */ public boolean isSecure() { return this.secure; } /** * Setter for the secure * * @param secure * the secure to set */ public void setSecure(boolean secure) { this.secure = secure; } /** * @return the key store */ public String getKeystore() { return (String) getAttribute("keystore"); } /** * @param keystore */ public void setKeystore(String keystore) { setAttribute("keystore", keystore); } /** * @return the key pass */ public String getKeypass() { return (String) getAttribute("keypass"); } /** * @param keypass */ public void setKeypass(String keypass) { attributes.put("keypass", keypass); } /** * @return the key store type */ public String getKeytype() { return (String) getAttribute("keystoreType"); } /** * @param keyType */ public void setKeytype(String keyType) { setAttribute("keystoreType", keyType); } /** * @return the client authentication */ public String getClientauth() { return (String) getAttribute("clientauth"); } /** * @param k */ public void setClientauth(String k) { setAttribute("clientauth", k); } /** * 
@return the protocols */ public String getProtocols() { return (String) getAttribute("protocols"); } /** * @param protocols * the protocols to set */ public void setProtocols(String protocols) { setAttribute("protocols", protocols); } /** * @return the algorithm */ public String getAlgorithm() { return (String) getAttribute("algorithm"); } /** * @param k */ public void setAlgorithm(String k) { setAttribute("algorithm", k); } /** * @return the ciphers */ public String getCiphers() { return (String) getAttribute("ciphers"); } /** * * @param ciphers */ public void setCiphers(String ciphers) { setAttribute("ciphers", ciphers); } /** * @return the ke alias */ public String getKeyAlias() { return (String) getAttribute("keyAlias"); } /** * * @param keyAlias */ public void setKeyAlias(String keyAlias) { setAttribute("keyAlias", keyAlias); } /** * @return the SSL context */ public SSLContext getSSLContext() { return (SSLContext) getAttribute("SSLContext"); } /** * @param sslContext */ public void setSSLContext(SSLContext sslContext) { setAttribute("SSLContext", sslContext); } }
package com.smcplugin;

import com.intellij.navigation.ItemPresentation;
import com.intellij.psi.PsiFile;
import com.smcplugin.psi.*;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;

/**
 * smcplugin
 * Created by lemen on 28.03.2016.
 * <p>
 * Factory for {@link ItemPresentation}s of SMC PSI elements.  The plain
 * {@code forX} factories show the name of the containing file as the
 * location; the {@code forXInStructure} variants are used in the structure
 * view, where the file is implicit, so their location string is always null.
 * <p>
 * NOTE: the previous revision imported {@code com.sun.istack.internal.Nullable},
 * a JDK-internal annotation that is absent on non-Oracle JDKs and since Java 9;
 * it has been replaced with the platform's {@code org.jetbrains.annotations.Nullable},
 * which is also the annotation declared on {@link ItemPresentation}'s methods.
 */
public class PresentationFactory {

    /**
     * Shared location-string logic: name of the given containing file,
     * or null when the element is not attached to a file.
     */
    @Nullable
    private static String fileName(PsiFile containingFile) {
        return containingFile == null ? null : containingFile.getName();
    }

    /** Presentation of a map for views that show the containing file. */
    public static ItemPresentation forMap(final SmcMap element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.SM_MAP;
            }
        };
    }

    /** Presentation of a map for the structure view (no location). */
    public static ItemPresentation forMapInStructure(final SmcMap element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return null;
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.SM_MAP;
            }
        };
    }

    /** Presentation of a state for views that show the containing file. */
    public static ItemPresentation forState(final SmcState element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE;
            }
        };
    }

    /** Presentation of a state for the structure view (no location). */
    public static ItemPresentation forStateInStructure(final SmcState element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return null;
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE;
            }
        };
    }

    /** Presentation of a context action; uses the fully qualified name. */
    public static ItemPresentation forAction(final SmcAction element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getQualifiedFullName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.CTX_ACTION;
            }
        };
    }

    /** Presentation of a transition; uses the fully qualified name. */
    public static ItemPresentation forTransition(final SmcTransition element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getQualifiedFullName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.TRANSITION;
            }
        };
    }

    /**
     * Presentation of a transition for the structure view.  Unlike
     * {@link #forTransition}, shows the short (unqualified) full name.
     */
    public static ItemPresentation forTransitionInStructure(final SmcTransition element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getFullName();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return null;
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.TRANSITION;
            }
        };
    }

    /** Presentation of a state entry block; uses the entry type as its text. */
    public static ItemPresentation forEntry(final SmcEntry element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getType();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE_ENTRY;
            }
        };
    }

    /** Presentation of a state exit block; uses the exit type as its text. */
    public static ItemPresentation forExit(final SmcExit element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getType();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return fileName(element.getContainingFile());
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE_EXIT;
            }
        };
    }

    /** Presentation of a state exit block for the structure view (no location). */
    public static ItemPresentation forExitInStructure(final SmcExit element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getType();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return null;
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE_EXIT;
            }
        };
    }

    /** Presentation of a state entry block for the structure view (no location). */
    public static ItemPresentation forEntryInStructure(final SmcEntry element) {
        return new ItemPresentation() {
            @Nullable
            @Override
            public String getPresentableText() {
                return element.getType();
            }

            @Nullable
            @Override
            public String getLocationString() {
                return null;
            }

            @Nullable
            @Override
            public Icon getIcon(boolean unused) {
                return SmcIcons.STATE_ENTRY;
            }
        };
    }
}
package net.alpenblock.bungeeperms;

import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
import lombok.SneakyThrows;
import net.alpenblock.bungeeperms.platform.Sender;
import org.bukkit.event.Event;
import org.bukkit.event.EventHandler;
import org.bukkit.event.HandlerList;
import org.bukkit.event.Listener;

/**
 * Grab-bag of static helper methods: string splitting/formatting, reflection
 * accessors, UUID parsing and command-argument validation.
 */
public class Statics
{

    /**
     * Counts case-insensitive (possibly overlapping) occurrences of
     * {@code seq} in {@code s}.
     *
     * @param s   the string to search in
     * @param seq the sequence to count
     * @return the number of occurrences
     */
    public static int countSequences(String s, String seq)
    {
        int count = 0;
        for (int i = 0; i < s.length() - seq.length() + 1; i++)
        {
            if (s.substring(i, i + seq.length()).equalsIgnoreCase(seq))
            {
                count++;
            }
        }
        return count;
    }

    /**
     * Resolves a (possibly partial) player name to the full name of an online
     * player whose name starts with it; returns the input unchanged when no
     * such player is online.
     *
     * @param player the (partial) player name
     * @return the resolved full name, or {@code player} if not resolvable
     */
    public static String getFullPlayerName(String player)
    {
        Sender p = BungeePerms.getInstance().getPlugin().getPlayer(player);
        if (p != null)
        {
            for (Sender pp : BungeePerms.getInstance().getPlugin().getPlayers())
            {
                if (pp.getName().startsWith(player))
                {
                    return pp.getName();
                }
            }
            return p.getName();
        }
        else
        {
            return player;
        }
    }

    /**
     * Splits {@code s} on case-insensitive occurrences of {@code seperator}.
     * Interior empty segments are kept; a trailing empty segment is dropped.
     * <p>
     * FIX: the previous implementation stopped scanning
     * {@code seperator.length() - 1} characters before the end of the string,
     * silently dropping the tail for multi-character separators (and returning
     * an empty list when {@code s} was shorter than the separator).
     *
     * @param s         the string to split
     * @param seperator the separator sequence
     * @return the list of segments
     */
    public static List<String> toList(String s, String seperator)
    {
        List<String> l = new ArrayList<>();
        // StringBuilder instead of repeated String concatenation (O(n) not O(n^2))
        StringBuilder ls = new StringBuilder();
        int i = 0;
        while (i < s.length())
        {
            if (i + seperator.length() <= s.length()
                && s.substring(i, i + seperator.length()).equalsIgnoreCase(seperator))
            {
                l.add(ls.toString());
                ls.setLength(0);
                i += seperator.length();
            }
            else
            {
                ls.append(s.charAt(i));
                i++;
            }
        }
        if (ls.length() > 0)
        {
            l.add(ls.toString());
        }
        return l;
    }

    /**
     * Checks whether {@code arg} matches any of the given aliases
     * (case-insensitive).
     *
     * @param arg     the argument to test
     * @param aliases the accepted aliases
     * @return true if any alias matches
     */
    public static boolean argAlias(String arg, String... aliases)
    {
        for (int i = 0; i < aliases.length; i++)
        {
            if (aliases[i].equalsIgnoreCase(arg))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Replaces the value of a declared field via reflection and returns the
     * old value; returns null on any failure.
     *
     * @param instance the object whose field is replaced
     * @param var      the new value
     * @param varname  the field name
     * @return the previous value, or null on failure
     */
    @SuppressWarnings("unchecked")
    public static <T> T replaceField(Object instance, T var, String varname)
    {
        try
        {
            Field f = instance.getClass().getDeclaredField(varname);
            f.setAccessible(true);
            T old = (T) f.get(instance);
            f.set(instance, var);
            return old;
        }
        catch (Exception ex)
        {
            // best effort: reflection failures are reported as null
            return null;
        }
    }

    /**
     * Reads a declared field of {@code instance}'s runtime class via
     * reflection; returns null on any failure.
     *
     * @param instance the object to read from
     * @param type     the expected value type (used for inference only)
     * @param varname  the field name
     * @return the field value, or null on failure
     */
    @SuppressWarnings("unchecked")
    public static <T> T getField(Object instance, Class<T> type, String varname)
    {
        try
        {
            Field f = instance.getClass().getDeclaredField(varname);
            f.setAccessible(true);
            T old = (T) f.get(instance);
            return old;
        }
        catch (Exception ex)
        {
            return null;
        }
    }

    /**
     * Reads a field declared on an explicit class (useful when the field lives
     * on a superclass of {@code instance}); returns null on any failure.
     *
     * @param clazz    the class declaring the field
     * @param instance the object to read from (null for static fields)
     * @param type     the expected value type (used for inference only)
     * @param varname  the field name
     * @return the field value, or null on failure
     */
    @SuppressWarnings("unchecked")
    public static <T> T getField(Class clazz, Object instance, Class<T> type, String varname)
    {
        try
        {
            Field f = clazz.getDeclaredField(varname);
            f.setAccessible(true);
            T old = (T) f.get(instance);
            return old;
        }
        catch (Exception ex)
        {
            return null;
        }
    }

    /**
     * Writes a declared field of {@code instance}'s runtime class via
     * reflection; failures are silently ignored (best effort).
     *
     * @param instance the object whose field is written
     * @param var      the new value
     * @param varname  the field name
     */
    public static void setField(Object instance, Object var, String varname)
    {
        try
        {
            Field f = instance.getClass().getDeclaredField(varname);
            f.setAccessible(true);
            f.set(instance, var);
        }
        catch (Exception ignored)
        {
            // deliberate best-effort write
        }
    }

    /**
     * Writes a field declared on an explicit class via reflection; failures
     * are silently ignored (best effort).
     *
     * @param clazz    the class declaring the field
     * @param instance the object whose field is written (null for static)
     * @param var      the new value
     * @param varname  the field name
     */
    public static void setField(Class clazz, Object instance, Object var, String varname)
    {
        try
        {
            Field f = clazz.getDeclaredField(varname);
            f.setAccessible(true);
            f.set(instance, var);
        }
        catch (Exception ignored)
        {
            // deliberate best-effort write
        }
    }

    /**
     * Parses a UUID from either the canonical dashed form or the 32-character
     * hex form (as returned by the Mojang API).
     * <p>
     * FIX: the previous implementation appended a spurious trailing "-" when
     * reconstructing the dashed form, producing a 37-character string that
     * {@link UUID#fromString(String)} rejects, so undashed input always
     * yielded null.
     *
     * @param s the UUID string
     * @return the parsed UUID, or null if unparsable
     */
    public static UUID parseUUID(String s)
    {
        try
        {
            return UUID.fromString(s);
        }
        catch (Exception ignored)
        {
            // not dashed form; fall through to the 32-char hex case
        }
        if (s.length() == 32)
        {
            // re-insert dashes in the canonical 8-4-4-4-12 pattern
            String dashed = s.substring(0, 8) + "-" + s.substring(8, 12)
                            + "-" + s.substring(12, 16) + "-" + s.substring(16, 20)
                            + "-" + s.substring(20, 32);
            try
            {
                return UUID.fromString(dashed);
            }
            catch (Exception ignored)
            {
            }
        }
        return null;
    }

    /**
     * Verifies the command has exactly {@code length} arguments, messaging the
     * sender otherwise.
     *
     * @param sender the command sender to notify on mismatch
     * @param args   the arguments
     * @param length the required argument count
     * @return true if the count matches
     */
    public static boolean matchArgs(Sender sender, String[] args, int length)
    {
        if (args.length > length)
        {
            Messages.sendTooManyArgsMessage(sender);
            return false;
        }
        else if (args.length < length)
        {
            Messages.sendTooLessArgsMessage(sender);
            return false;
        }
        return true;
    }

    /**
     * Verifies the command has between {@code min} and {@code max} arguments,
     * messaging the sender otherwise.
     *
     * @param sender the command sender to notify on mismatch
     * @param args   the arguments
     * @param min    the minimum argument count
     * @param max    the maximum argument count
     * @return true if the count is within range
     */
    public static boolean matchArgs(Sender sender, String[] args, int min, int max)
    {
        if (args.length > max)
        {
            Messages.sendTooManyArgsMessage(sender);
            return false;
        }
        else if (args.length < min)
        {
            Messages.sendTooLessArgsMessage(sender);
            return false;
        }
        return true;
    }

    /**
     * Formats a message via {@link MessageFormat#format(String, Object...)}.
     *
     * @param format the pattern
     * @param args   the pattern arguments
     * @return the formatted string
     */
    public static String format(String format, Object... args)
    {
        return MessageFormat.format(format, args);
    }

    /**
     * Renders a locale as {@code language} or {@code language-COUNTRY}.
     *
     * @param locale the locale
     * @return the locale string
     */
    public static String localeString(Locale locale)
    {
        return locale.getLanguage() + (locale.getCountry().isEmpty() ? "" : "-" + locale.getCountry());
    }

    /**
     * Null-safe lower-casing.
     *
     * @param s the string, may be null
     * @return the lower-cased string, or null
     */
    public static String toLower(String s)
    {
        return s == null ? null : s.toLowerCase();
    }

    /**
     * Detects the BungeeCord console sender by class name (avoids a hard
     * compile-time dependency on the BungeeCord API).
     *
     * @param o the object to test
     * @return true if it is the BungeeCord console sender
     */
    public static boolean isBungeeConsole(Object o)
    {
        return o.getClass().getName().equals("net.md_5.bungee.command.ConsoleCommandSender");
    }

    /**
     * Unregisters all Bukkit event handlers declared on the given listener by
     * looking up each event's static {@code getHandlerList()}.
     *
     * @param l the listener to unregister
     */
    @SneakyThrows
    public static void unregisterListener(Listener l)
    {
        for (Method m : l.getClass().getDeclaredMethods())
        {
            if (m.getAnnotation(EventHandler.class) != null && m.getParameterTypes().length > 0)
            {
                Class eventclass = m.getParameterTypes()[0];
                if (Event.class.isAssignableFrom(eventclass))
                {
                    HandlerList hl = (HandlerList) eventclass.getMethod("getHandlerList").invoke(null);
                    hl.unregister(l);
                }
            }
        }
    }

    /**
     * Case-insensitive containment test for a string list.
     *
     * @param list    the list to search
     * @param element the element to look for
     * @return true if an equalsIgnoreCase match exists
     */
    public static boolean listContains(List<String> list, String element)
    {
        for (String l : list)
        {
            if (l.equalsIgnoreCase(element))
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Null-safe emptiness test.
     *
     * @param s the string, may be null
     * @return true if null or empty
     */
    public static boolean isEmpty(String s)
    {
        return s == null || s.isEmpty();
    }

    /**
     * Renders an optional display fragment: empty string when absent,
     * otherwise the fragment followed by a single space.
     *
     * @param append the fragment, may be null/empty
     * @return "" or {@code append + " "}
     */
    public static String formatDisplay(String append)
    {
        return isEmpty(append) ? "" : append + " ";
    }
}
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.yangutils.translator.tojava;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.onosproject.yangutils.datamodel.YangDataTypes;
import org.onosproject.yangutils.datamodel.YangEnum;
import org.onosproject.yangutils.datamodel.YangEnumeration;
import org.onosproject.yangutils.datamodel.YangNode;
import org.onosproject.yangutils.translator.exception.TranslatorException;
import org.onosproject.yangutils.translator.tojava.javamodel.YangJavaType;
import org.onosproject.yangutils.utils.io.impl.YangPluginConfig;

import static org.onosproject.yangutils.translator.tojava.GeneratedTempFileType.ENUM_IMPL_MASK;
import static org.onosproject.yangutils.translator.tojava.JavaAttributeInfo.getAttributeInfoForTheData;
import static org.onosproject.yangutils.translator.tojava.utils.JavaCodeSnippetGen.generateEnumAttributeString;
import static org.onosproject.yangutils.translator.tojava.utils.JavaFileGenerator.generateEnumClassFile;
import static org.onosproject.yangutils.translator.tojava.utils.JavaIdentifierSyntax.getEnumJavaAttribute;
import static org.onosproject.yangutils.utils.io.impl.YangIoUtils.getPrefixForIdentifier;
import static org.onosproject.yangutils.translator.tojava.utils.TempJavaCodeFragmentFilesUtils.closeFile;
import static org.onosproject.yangutils.utils.UtilConstants.EMPTY_STRING;
import static org.onosproject.yangutils.utils.UtilConstants.REGEX_FOR_FIRST_DIGIT;
import static org.onosproject.yangutils.utils.UtilConstants.YANG_AUTO_PREFIX;
import static org.onosproject.yangutils.translator.tojava.utils.JavaIdentifierSyntax.createPackage;

/**
 * Temporary-file manager for enumeration java code generation.  Collects the
 * enum constants of a YANG enumeration into temp files and finally serializes
 * them into the generated enum class.
 */
public class TempJavaEnumerationFragmentFiles extends TempJavaFragmentFiles {

    /**
     * Name of the temporary file that accumulates enum class content.
     */
    private static final String ENUM_CLASS_TEMP_FILE_NAME = "EnumClass";

    /**
     * Suffix appended to the generated enum class file name (none).
     */
    private static final String ENUM_CLASS_FILE_NAME_SUFFIX = EMPTY_STRING;

    /**
     * Value of the enum constant currently being processed.
     */
    private int enumValue;

    /**
     * Enum constant name (upper-cased java attribute) to YANG value mapping.
     */
    private Map<String, Integer> enumStringMap = new HashMap<>();

    /**
     * Ordered list of enum constant names (upper-cased java attributes).
     */
    private List<String> enumStringList;

    /**
     * Handle of the temporary file accumulating enum class content.
     */
    private File enumClassTempFileHandle;

    /**
     * Handle of the finally generated enum class java file.
     */
    private File enumClassJavaFileHandle;

    /**
     * Creates an instance of temporary java code fragment.
     *
     * @param javaFileInfo generated java file info
     * @throws IOException when fails to create new file handle
     */
    public TempJavaEnumerationFragmentFiles(JavaFileInfo javaFileInfo)
            throws IOException {
        super(javaFileInfo);
        setEnumSetJavaMap(new HashMap<>());
        setEnumStringList(new ArrayList<>());
        // register the enum class mask so the enum temp file participates
        // in generation, then create its backing temp file handle
        addGeneratedTempFile(ENUM_IMPL_MASK);
        setEnumClassTempFileHandle(getTemporaryFileHandle(ENUM_CLASS_TEMP_FILE_NAME));
    }

    /**
     * Returns enum class java file handle.
     *
     * @return enum class java file handle
     */
    public File getEnumClassJavaFileHandle() {
        return enumClassJavaFileHandle;
    }

    /**
     * Sets enum class java file handle.
     *
     * @param enumClassJavaFileHandle enum class java file handle
     */
    private void setEnumClassJavaFileHandle(File enumClassJavaFileHandle) {
        this.enumClassJavaFileHandle = enumClassJavaFileHandle;
    }

    /**
     * Returns the value of the enum constant currently being processed.
     *
     * @return current enum value
     */
    private int getEnumValue() {
        return enumValue;
    }

    /**
     * Sets the value of the enum constant currently being processed.
     *
     * @param enumValue current enum value
     */
    private void setEnumValue(int enumValue) {
        this.enumValue = enumValue;
    }

    /**
     * Returns the enum constant name to value mapping.
     *
     * @return the enum set java map
     */
    public Map<String, Integer> getEnumSetJavaMap() {
        return enumStringMap;
    }

    /**
     * Sets the enum constant name to value mapping.
     *
     * @param map the enum set java map to set
     */
    private void setEnumSetJavaMap(Map<String, Integer> map) {
        this.enumStringMap = map;
    }

    /**
     * Returns temporary file handle for enum class file.
     *
     * @return temporary file handle for enum class file
     */
    public File getEnumClassTempFileHandle() {
        return enumClassTempFileHandle;
    }

    /**
     * Sets temporary file handle for enum class file.
     *
     * @param enumClassTempFileHandle temporary file handle for enum class file
     */
    private void setEnumClassTempFileHandle(File enumClassTempFileHandle) {
        this.enumClassTempFileHandle = enumClassTempFileHandle;
    }

    /**
     * Appends one enum constant's generated attribute string to the enum
     * class temp file.
     *
     * @param curEnumName  current enum constant name (java attribute form)
     * @param pluginConfig plugin configurations
     * @throws IOException when fails to do IO operations
     */
    private void addAttributesForEnumClass(String curEnumName, YangPluginConfig pluginConfig)
            throws IOException {
        appendToFile(getEnumClassTempFileHandle(),
                     generateEnumAttributeString(curEnumName, getEnumValue(), pluginConfig));
    }

    /**
     * Collects all enum constants of the given enumeration node into the
     * temporary files, sanitizing names that start with a digit.
     *
     * @param curNode      current YANG node
     * @param pluginConfig plugin configurations
     * @throws IOException when fails to do IO operations
     */
    public void addEnumAttributeToTempFiles(YangNode curNode, YangPluginConfig pluginConfig)
            throws IOException {
        super.addJavaSnippetInfoToApplicableTempFiles(getJavaAttributeForEnum(pluginConfig),
                                                      pluginConfig);
        if (!(curNode instanceof YangEnumeration)) {
            throw new TranslatorException("current node should be of enumeration type.");
        }
        YangEnumeration enumeration = (YangEnumeration) curNode;
        for (YangEnum yangEnum : enumeration.getEnumSet()) {
            String originalName = yangEnum.getNamedValue();
            // java identifiers cannot start with a digit: prepend either the
            // user-configured conflict-resolution prefix or the default one
            if (originalName.matches(REGEX_FOR_FIRST_DIGIT)) {
                String prefix = getPrefixForIdentifier(pluginConfig.getConflictResolver());
                yangEnum.setNamedValue((prefix != null ? prefix : YANG_AUTO_PREFIX) + originalName);
            }
            setEnumValue(yangEnum.getValue());
            addToEnumStringList(yangEnum.getNamedValue());
            addToEnumSetJavaMap(yangEnum.getNamedValue(), yangEnum.getValue());
            addJavaSnippetInfoToApplicableTempFiles(yangEnum.getNamedValue(), pluginConfig);
        }
    }

    /**
     * Builds the backing {@code int} attribute used by the generated enum
     * class to hold each constant's value.
     *
     * @param pluginConfig plugin configurations
     * @return java attribute
     */
    public JavaAttributeInfo getJavaAttributeForEnum(YangPluginConfig pluginConfig) {
        YangJavaType<?> intType = new YangJavaType<>();
        intType.setDataType(YangDataTypes.INT32);
        intType.setDataTypeName("int");
        intType.updateJavaQualifiedInfo(pluginConfig.getConflictResolver());
        return getAttributeInfoForTheData(
                intType.getJavaQualifiedInfo(),
                intType.getDataTypeName(), intType,
                getIsQualifiedAccessOrAddToImportList(intType.getJavaQualifiedInfo()),
                false);
    }

    /**
     * Records one enum constant and its value in the name-to-value map.
     *
     * @param curEnumName current enum constant name
     * @param value       current enum constant value
     */
    private void addToEnumSetJavaMap(String curEnumName, int value) {
        getEnumSetJavaMap().put(getEnumJavaAttribute(curEnumName).toUpperCase(), value);
    }

    /**
     * Adds the new attribute info to the target generated temporary files.
     *
     * @param curEnumName  the attribute name that needs to be added to
     *                     temporary files
     * @param pluginConfig plugin configurations
     * @throws IOException IO operation fail
     */
    void addJavaSnippetInfoToApplicableTempFiles(String curEnumName, YangPluginConfig pluginConfig)
            throws IOException {
        addAttributesForEnumClass(getEnumJavaAttribute(curEnumName), pluginConfig);
    }

    /**
     * Constructs java code exit.
     *
     * @param fileType generated file type
     * @param curNode  current YANG node
     * @throws IOException when fails to generate java files
     */
    @Override
    public void generateJavaFile(int fileType, YangNode curNode)
            throws IOException {
        createPackage(curNode);
        setEnumClassJavaFileHandle(getJavaFileHandle(getJavaClassName(ENUM_CLASS_FILE_NAME_SUFFIX)));
        setEnumClassJavaFileHandle(generateEnumClassFile(getEnumClassJavaFileHandle(), curNode));
        freeTemporaryResources(false);
    }

    /**
     * Removes all temporary file handles.
     *
     * @param isErrorOccurred when translator fails to generate java files we
     *                        need to close all open file handles include
     *                        temporary files and java files.
     * @throws IOException when failed to delete the temporary files
     */
    @Override
    public void freeTemporaryResources(boolean isErrorOccurred)
            throws IOException {
        // the java file is only deleted on error; the temp file is always deleted
        closeFile(getEnumClassJavaFileHandle(), isErrorOccurred);
        closeFile(getEnumClassTempFileHandle(), true);
        super.freeTemporaryResources(isErrorOccurred);
    }

    /**
     * Appends one constant name (upper-cased java attribute) to the ordered list.
     *
     * @param curEnumValue current enum constant name
     */
    private void addToEnumStringList(String curEnumValue) {
        getEnumStringList().add(getEnumJavaAttribute(curEnumValue).toUpperCase());
    }

    /**
     * Returns enum string list.
     *
     * @return the enumStringList
     */
    public List<String> getEnumStringList() {
        return enumStringList;
    }

    /**
     * Sets enum string list.
     *
     * @param enumStringList the enumStringList to set
     */
    public void setEnumStringList(List<String> enumStringList) {
        this.enumStringList = enumStringList;
    }
}
/*
 * Copyright 2018 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.plugin.access.pluggabletask;

import com.thoughtworks.go.plugin.access.common.AbstractExtension;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsConfiguration;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsConstants;
import com.thoughtworks.go.plugin.access.common.settings.PluginSettingsJsonMessageHandler1_0;
import com.thoughtworks.go.plugin.api.request.GoPluginApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.DefaultGoPluginApiResponse;
import com.thoughtworks.go.plugin.api.response.GoPluginApiResponse;
import com.thoughtworks.go.plugin.api.response.execution.ExecutionResult;
import com.thoughtworks.go.plugin.api.response.validation.ValidationResult;
import com.thoughtworks.go.plugin.api.task.TaskConfig;
import com.thoughtworks.go.plugin.infra.Action;
import com.thoughtworks.go.plugin.infra.ActionWithReturn;
import com.thoughtworks.go.plugin.infra.PluginManager;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;

import java.util.ArrayList;
import java.util.Collections;

import static com.thoughtworks.go.plugin.domain.common.PluginConstants.PLUGGABLE_TASK_EXTENSION;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.mockito.MockitoAnnotations.initMocks;

/**
 * Unit tests for {@link TaskExtension}: verifies that each extension call is
 * translated into the expected {@link GoPluginApiRequest} and that plugin
 * responses are routed through the registered message handlers.
 */
public class TaskExtensionTest {
    @Mock
    private PluginManager pluginManager;
    @Mock
    private PluginSettingsJsonMessageHandler1_0 pluginSettingsJSONMessageHandler;
    @Mock
    private JsonBasedTaskExtensionHandler jsonMessageHandler;
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    private TaskExtension extension;
    private String pluginId;
    private PluginSettingsConfiguration pluginSettingsConfiguration;
    private ArgumentCaptor<GoPluginApiRequest> requestArgumentCaptor;

    @Before
    public void setup() {
        initMocks(this);
        extension = new TaskExtension(pluginManager);
        pluginId = "plugin-id";
        when(pluginManager.resolveExtensionVersion(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), any(ArrayList.class))).thenReturn("1.0");
        pluginSettingsConfiguration = new PluginSettingsConfiguration();
        requestArgumentCaptor = ArgumentCaptor.forClass(GoPluginApiRequest.class);
    }

    @Test
    public void shouldExtendAbstractExtension() throws Exception {
        assertTrue(extension instanceof AbstractExtension);
    }

    @Test
    public void shouldTalkToPluginToGetPluginSettingsConfiguration() throws Exception {
        extension.registerHandler("1.0", pluginSettingsJSONMessageHandler);
        extension.messageHandlerMap.put("1.0", jsonMessageHandler);

        String responseBody = "expected-response";
        PluginSettingsConfiguration deserializedResponse = new PluginSettingsConfiguration();
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsConfiguration(responseBody)).thenReturn(deserializedResponse);

        when(pluginManager.isPluginOfType(PLUGGABLE_TASK_EXTENSION, pluginId)).thenReturn(true);
        when(pluginManager.submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody));

        PluginSettingsConfiguration response = extension.getPluginSettingsConfiguration(pluginId);

        assertRequest(requestArgumentCaptor.getValue(), PLUGGABLE_TASK_EXTENSION, "1.0", PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_CONFIGURATION, null);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsConfiguration(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToGetPluginSettingsView() throws Exception {
        extension.registerHandler("1.0", pluginSettingsJSONMessageHandler);
        extension.messageHandlerMap.put("1.0", jsonMessageHandler);

        String responseBody = "expected-response";
        String deserializedResponse = "";
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsView(responseBody)).thenReturn(deserializedResponse);

        when(pluginManager.isPluginOfType(PLUGGABLE_TASK_EXTENSION, pluginId)).thenReturn(true);
        when(pluginManager.submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody));

        String response = extension.getPluginSettingsView(pluginId);

        assertRequest(requestArgumentCaptor.getValue(), PLUGGABLE_TASK_EXTENSION, "1.0", PluginSettingsConstants.REQUEST_PLUGIN_SETTINGS_VIEW, null);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsView(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldTalkToPluginToValidatePluginSettings() throws Exception {
        extension.registerHandler("1.0", pluginSettingsJSONMessageHandler);
        extension.messageHandlerMap.put("1.0", jsonMessageHandler);

        String requestBody = "expected-request";
        when(pluginSettingsJSONMessageHandler.requestMessageForPluginSettingsValidation(pluginSettingsConfiguration)).thenReturn(requestBody);

        String responseBody = "expected-response";
        ValidationResult deserializedResponse = new ValidationResult();
        when(pluginSettingsJSONMessageHandler.responseMessageForPluginSettingsValidation(responseBody)).thenReturn(deserializedResponse);

        when(pluginManager.isPluginOfType(PLUGGABLE_TASK_EXTENSION, pluginId)).thenReturn(true);
        when(pluginManager.submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), requestArgumentCaptor.capture())).thenReturn(DefaultGoPluginApiResponse.success(responseBody));

        ValidationResult response = extension.validatePluginSettings(pluginId, pluginSettingsConfiguration);

        assertRequest(requestArgumentCaptor.getValue(), PLUGGABLE_TASK_EXTENSION, "1.0", PluginSettingsConstants.REQUEST_VALIDATE_PLUGIN_SETTINGS, requestBody);
        verify(pluginSettingsJSONMessageHandler).responseMessageForPluginSettingsValidation(responseBody);
        assertSame(response, deserializedResponse);
    }

    @Test
    public void shouldExecuteTheTask() {
        ActionWithReturn actionWithReturn = mock(ActionWithReturn.class);
        when(actionWithReturn.execute(any(JsonBasedPluggableTask.class), any(GoPluginDescriptor.class))).thenReturn(ExecutionResult.success("yay"));

        ExecutionResult executionResult = extension.execute(pluginId, actionWithReturn);

        verify(actionWithReturn).execute(any(JsonBasedPluggableTask.class), any(GoPluginDescriptor.class));
        assertThat(executionResult.getMessagesForDisplay(), is("yay"));
        assertTrue(executionResult.isSuccessful());
    }

    @Test
    public void shouldPerformTheActionOnTask() {
        Action action = mock(Action.class);
        final GoPluginDescriptor descriptor = mock(GoPluginDescriptor.class);
        when(pluginManager.getPluginDescriptorFor(pluginId)).thenReturn(descriptor);

        extension.doOnTask(pluginId, action);

        verify(action).execute(any(JsonBasedPluggableTask.class), eq(descriptor));
    }

    @Test
    public void shouldNotSupportFetchingPluginSettings() throws Exception {
        thrown.expect(UnsupportedOperationException.class);
        thrown.expectMessage("Fetch PluginSettings is not supported by Task Endpoint.");

        extension.pluginSettingsJSON("plugin_id", Collections.emptyMap());
    }

    @Test
    public void shouldNotSupportServerInfoToJSON() throws Exception {
        thrown.expect(UnsupportedOperationException.class);
        thrown.expectMessage("Fetch Server Info is not supported by Task endpoint.");

        extension.serverInfoJSON("plugin_id", "server_id", "site_url", "secure_site_url");
    }

    @Test
    public void shouldValidateTask() {
        GoPluginApiResponse response = mock(GoPluginApiResponse.class);
        TaskExtension jsonBasedTaskExtension = new TaskExtension(pluginManager);
        TaskConfig taskConfig = mock(TaskConfig.class);

        when(response.responseCode()).thenReturn(DefaultGoApiResponse.SUCCESS_RESPONSE_CODE);
        when(pluginManager.isPluginOfType(PLUGGABLE_TASK_EXTENSION, pluginId)).thenReturn(true);
        when(response.responseBody()).thenReturn("{\"errors\":{\"key\":\"error\"}}");
        when(pluginManager.submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), any(GoPluginApiRequest.class))).thenReturn(response);

        ValidationResult validationResult = jsonBasedTaskExtension.validate(pluginId, taskConfig);

        verify(pluginManager).submitTo(eq(pluginId), eq(PLUGGABLE_TASK_EXTENSION), any(GoPluginApiRequest.class));
        assertFalse(validationResult.isSuccessful());
        // FIX: assertEquals takes (expected, actual) — the previous revision
        // passed them reversed, which produces misleading failure messages.
        assertEquals("key", validationResult.getErrors().get(0).getKey());
        assertEquals("error", validationResult.getErrors().get(0).getMessage());
    }

    /** Asserts the captured request's extension, version, name and body. */
    private void assertRequest(GoPluginApiRequest goPluginApiRequest, String extensionName, String version, String requestName, String requestBody) {
        assertThat(goPluginApiRequest.extension(), is(extensionName));
        assertThat(goPluginApiRequest.extensionVersion(), is(version));
        assertThat(goPluginApiRequest.requestName(), is(requestName));
        assertThat(goPluginApiRequest.requestBody(), is(requestBody));
    }
}
/*
 * Copyright 2008 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jwetherell.quick_response_code.qrcode.encoder;

import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;

import com.google.zxing.EncodeHintType;
import com.google.zxing.WriterException;
import com.google.zxing.common.BitArray;
import com.google.zxing.common.CharacterSetECI;
import com.google.zxing.common.reedsolomon.GenericGF;
import com.google.zxing.common.reedsolomon.ReedSolomonEncoder;
import com.jwetherell.quick_response_code.qrcode.decoder.ErrorCorrectionLevel;
import com.jwetherell.quick_response_code.qrcode.decoder.Mode;
import com.jwetherell.quick_response_code.qrcode.decoder.Version;

/**
 * QR Code encoder: turns a content string into a fully built {@link QRCode}
 * (mode selection, segment encoding, Reed-Solomon error correction,
 * interleaving, and mask selection).
 *
 * @author satorux@google.com (Satoru Takabayashi) - creator
 * @author dswitkin@google.com (Daniel Switkin) - ported from C++
 */
public final class Encoder {

    // Maps an ASCII code point to its alphanumeric-mode code (0-44), or -1 if
    // the character is not encodable in alphanumeric mode.
    // The original table is defined in the table 5 of JISX0510:2004 (p.19).
    private static final int[] ALPHANUMERIC_TABLE = {
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x00-0x0f
            -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, // 0x10-0x1f
            36, -1, -1, -1, 37, 38, -1, -1, -1, -1, 39, 40, -1, 41, 42, 43, // 0x20-0x2f
            0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 44, -1, -1, -1, -1, -1, // 0x30-0x3f
            -1, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, // 0x40-0x4f
            25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, -1, -1, -1, -1, -1, // 0x50-0x5f
    };

    // Charset used for BYTE mode when the caller supplies no CHARACTER_SET hint.
    static final String DEFAULT_BYTE_MODE_ENCODING = "ISO-8859-1";

    // Static-utility class; never instantiated.
    private Encoder() {
    }

    /**
     * Computes the total mask penalty of a rendered matrix.
     *
     * The mask penalty calculation is complicated. See Table 21 of
     * JISX0510:2004 (p.45) for details. Basically it applies four rules and
     * sums all penalties.
     */
    private static int calculateMaskPenalty(ByteMatrix matrix) {
        int penalty = 0;
        penalty += MaskUtil.applyMaskPenaltyRule1(matrix);
        penalty += MaskUtil.applyMaskPenaltyRule2(matrix);
        penalty += MaskUtil.applyMaskPenaltyRule3(matrix);
        penalty += MaskUtil.applyMaskPenaltyRule4(matrix);
        return penalty;
    }

    /**
     * Encode "content" with the error correction level "ecLevel". The encoding
     * mode will be chosen internally by chooseMode(). On success, store the
     * result in "qrCode".
     *
     * We recommend you to use QRCode.EC_LEVEL_L (the lowest level) for
     * "getECLevel" since our primary use is to show QR code on desktop screens.
     * We don't need very strong error correction for this purpose.
     *
     * Note that there is no way to encode bytes in MODE_KANJI. We might want to
     * add EncodeWithMode() with which clients can specify the encoding mode.
     * For now, we don't need the functionality.
     *
     * @throws WriterException if the data cannot fit any QR Code version
     */
    public static void encode(String content, ErrorCorrectionLevel ecLevel, QRCode qrCode) throws WriterException {
        encode(content, ecLevel, null, qrCode);
    }

    /**
     * Encodes "content" into "qrCode", honoring an optional
     * {@link EncodeHintType#CHARACTER_SET} hint for BYTE mode.
     *
     * @param content the text to encode
     * @param ecLevel target error correction level
     * @param hints   optional encode hints; may be null
     * @param qrCode  output object populated on success
     * @throws WriterException if the data cannot fit any QR Code version or an
     *                         internal consistency check fails
     */
    public static void encode(String content, ErrorCorrectionLevel ecLevel, Map<EncodeHintType, ?> hints, QRCode qrCode) throws WriterException {

        String encoding = hints == null ? null : (String) hints.get(EncodeHintType.CHARACTER_SET);
        if (encoding == null) {
            encoding = DEFAULT_BYTE_MODE_ENCODING;
        }

        // Step 1: Choose the mode (encoding).
        Mode mode = chooseMode(content, encoding);

        // Step 2: Append "bytes" into "dataBits" in appropriate encoding.
        BitArray dataBits = new BitArray();
        appendBytes(content, mode, dataBits, encoding);
        // Step 3: Initialize QR code that can contain "dataBits".
        int numInputBits = dataBits.getSize();
        initQRCode(numInputBits, ecLevel, mode, qrCode);

        // Step 4: Build another bit vector that contains header and data.
        BitArray headerAndDataBits = new BitArray();

        // Step 4.5: Append ECI message if applicable
        if (mode == Mode.BYTE && !DEFAULT_BYTE_MODE_ENCODING.equals(encoding)) {
            CharacterSetECI eci = CharacterSetECI.getCharacterSetECIByName(encoding);
            if (eci != null) {
                appendECI(eci, headerAndDataBits);
            }
        }

        appendModeInfo(mode, headerAndDataBits);

        // BYTE mode counts encoded bytes; the other modes count characters.
        int numLetters = mode == Mode.BYTE ? dataBits.getSizeInBytes() : content.length();
        appendLengthInfo(numLetters, qrCode.getVersion(), mode, headerAndDataBits);
        headerAndDataBits.appendBitArray(dataBits);

        // Step 5: Terminate the bits properly.
        terminateBits(qrCode.getNumDataBytes(), headerAndDataBits);

        // Step 6: Interleave data bits with error correction code.
        BitArray finalBits = new BitArray();
        interleaveWithECBytes(headerAndDataBits, qrCode.getNumTotalBytes(), qrCode.getNumDataBytes(), qrCode.getNumRSBlocks(), finalBits);

        // Step 7: Choose the mask pattern and set to "qrCode".
        ByteMatrix matrix = new ByteMatrix(qrCode.getMatrixWidth(), qrCode.getMatrixWidth());
        qrCode.setMaskPattern(chooseMaskPattern(finalBits, ecLevel, qrCode.getVersion(), matrix));

        // Step 8. Build the matrix and set it to "qrCode".
        MatrixUtil.buildMatrix(finalBits, ecLevel, qrCode.getVersion(), qrCode.getMaskPattern(), matrix);
        qrCode.setMatrix(matrix);
        // Step 9. Make sure we have a valid QR Code.
        if (!qrCode.isValid()) {
            throw new WriterException("Invalid QR code: " + qrCode.toString());
        }
    }

    /**
     * @return the code point of the table used in alphanumeric mode or -1 if
     *         there is no corresponding code in the table.
     */
    static int getAlphanumericCode(int code) {
        if (code < ALPHANUMERIC_TABLE.length) {
            return ALPHANUMERIC_TABLE[code];
        }
        return -1;
    }

    /** Chooses the best mode with no encoding hint. */
    public static Mode chooseMode(String content) {
        return chooseMode(content, null);
    }

    /**
     * Choose the best mode by examining the content. Note that 'encoding' is
     * used as a hint; if it is Shift_JIS, and the input is only double-byte
     * Kanji, then we return {@link Mode#KANJI}.
     */
    private static Mode chooseMode(String content, String encoding) {
        if ("Shift_JIS".equals(encoding)) {
            // Choose Kanji mode if all input are double-byte characters
            return isOnlyDoubleByteKanji(content) ? Mode.KANJI : Mode.BYTE;
        }
        boolean hasNumeric = false;
        boolean hasAlphanumeric = false;
        for (int i = 0; i < content.length(); ++i) {
            char c = content.charAt(i);
            if (c >= '0' && c <= '9') {
                hasNumeric = true;
            } else if (getAlphanumericCode(c) != -1) {
                hasAlphanumeric = true;
            } else {
                // Any character outside the alphanumeric table forces BYTE mode.
                return Mode.BYTE;
            }
        }
        if (hasAlphanumeric) {
            return Mode.ALPHANUMERIC;
        }
        if (hasNumeric) {
            return Mode.NUMERIC;
        }
        return Mode.BYTE;
    }

    /**
     * @return true iff every character of "content" encodes to a double-byte
     *         Shift_JIS Kanji (lead byte in 0x81-0x9F or 0xE0-0xEB).
     */
    private static boolean isOnlyDoubleByteKanji(String content) {
        byte[] bytes;
        try {
            bytes = content.getBytes("Shift_JIS");
        } catch (UnsupportedEncodingException uee) {
            return false;
        }
        int length = bytes.length;
        if (length % 2 != 0) {
            // Double-byte-only content must produce an even byte count.
            return false;
        }
        for (int i = 0; i < length; i += 2) {
            int byte1 = bytes[i] & 0xFF;
            if ((byte1 < 0x81 || byte1 > 0x9F) && (byte1 < 0xE0 || byte1 > 0xEB)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Tries every mask pattern and returns the one with the lowest penalty.
     */
    private static int chooseMaskPattern(BitArray bits, ErrorCorrectionLevel ecLevel, int version, ByteMatrix matrix) throws WriterException {

        int minPenalty = Integer.MAX_VALUE; // Lower penalty is better.
        int bestMaskPattern = -1;
        // We try all mask patterns to choose the best one.
        for (int maskPattern = 0; maskPattern < QRCode.NUM_MASK_PATTERNS; maskPattern++) {
            MatrixUtil.buildMatrix(bits, ecLevel, version, maskPattern, matrix);
            int penalty = calculateMaskPenalty(matrix);
            if (penalty < minPenalty) {
                minPenalty = penalty;
                bestMaskPattern = maskPattern;
            }
        }
        return bestMaskPattern;
    }

    /**
     * Initialize "qrCode" according to "numInputBits", "ecLevel", and "mode".
     * On success, modify "qrCode". Picks the smallest version (1-40) whose
     * data capacity can hold the input plus the mode/length header.
     *
     * @throws WriterException if no version can hold the data
     */
    private static void initQRCode(int numInputBits, ErrorCorrectionLevel ecLevel, Mode mode, QRCode qrCode) throws WriterException {
        qrCode.setECLevel(ecLevel);
        qrCode.setMode(mode);

        // In the following comments, we use numbers of Version 7-H.
        for (int versionNum = 1; versionNum <= 40; versionNum++) {
            Version version = Version.getVersionForNumber(versionNum);
            // numBytes = 196
            int numBytes = version.getTotalCodewords();
            // getNumECBytes = 130
            Version.ECBlocks ecBlocks = version.getECBlocksForLevel(ecLevel);
            int numEcBytes = ecBlocks.getTotalECCodewords();
            // getNumRSBlocks = 5
            int numRSBlocks = ecBlocks.getNumBlocks();
            // getNumDataBytes = 196 - 130 = 66
            int numDataBytes = numBytes - numEcBytes;
            // We want to choose the smallest version which can contain data of
            // "numInputBytes" + some extra bits for the header (mode info and
            // length info). The header can be three bytes (precisely 4 + 16
            // bits) at most.
            if (numDataBytes >= getTotalInputBytes(numInputBits, version, mode)) {
                // Yay, we found the proper rs block info!
                qrCode.setVersion(versionNum);
                qrCode.setNumTotalBytes(numBytes);
                qrCode.setNumDataBytes(numDataBytes);
                qrCode.setNumRSBlocks(numRSBlocks);
                // getNumECBytes = 196 - 66 = 130
                qrCode.setNumECBytes(numEcBytes);
                // matrix width = 21 + 6 * 4 = 45
                qrCode.setMatrixWidth(version.getDimensionForVersion());
                return;
            }
        }
        throw new WriterException("Cannot find proper rs block info (input data too big?)");
    }

    /**
     * @return the number of whole bytes needed for the input bits plus the
     *         4-bit mode indicator and the version-dependent character count.
     */
    private static int getTotalInputBytes(int numInputBits, Version version, Mode mode) {
        int modeInfoBits = 4;
        int charCountBits = mode.getCharacterCountBits(version);
        int headerBits = modeInfoBits + charCountBits;
        int totalBits = numInputBits + headerBits;
        // Round up to a whole number of bytes.
        return (totalBits + 7) / 8;
    }

    /**
     * Terminate bits as described in 8.4.8 and 8.4.9 of JISX0510:2004 (p.24).
     * Appends the terminator, pads to a byte boundary, then fills remaining
     * capacity with the alternating 0xEC/0x11 padding bytes.
     *
     * @throws WriterException if the bits overflow or do not exactly fill capacity
     */
    static void terminateBits(int numDataBytes, BitArray bits) throws WriterException {
        int capacity = numDataBytes << 3;
        if (bits.getSize() > capacity) {
            throw new WriterException("data bits cannot fit in the QR Code" + bits.getSize() + " > " + capacity);
        }
        // Append up to four terminator (zero) bits.
        for (int i = 0; i < 4 && bits.getSize() < capacity; ++i) {
            bits.appendBit(false);
        }
        // Append termination bits. See 8.4.8 of JISX0510:2004 (p.24) for
        // details. If the last byte isn't 8-bit aligned, we'll add padding bits.
        int numBitsInLastByte = bits.getSize() & 0x07;
        if (numBitsInLastByte > 0) {
            for (int i = numBitsInLastByte; i < 8; i++) {
                bits.appendBit(false);
            }
        }
        // If we have more space, we'll fill the space with padding patterns
        // defined in 8.4.9 (p.24).
        int numPaddingBytes = numDataBytes - bits.getSizeInBytes();
        for (int i = 0; i < numPaddingBytes; ++i) {
            bits.appendBits((i & 0x01) == 0 ? 0xEC : 0x11, 8);
        }
        if (bits.getSize() != capacity) {
            throw new WriterException("Bits size does not equal capacity");
        }
    }

    /**
     * Get number of data bytes and number of error correction bytes for block
     * id "blockID". Store the result in "numDataBytesInBlock", and
     * "numECBytesInBlock". See table 12 in 8.5.1 of JISX0510:2004 (p.30).
     *
     * @throws WriterException if blockID is out of range or the derived group
     *                         sizes are internally inconsistent
     */
    static void getNumDataBytesAndNumECBytesForBlockID(int numTotalBytes, int numDataBytes, int numRSBlocks, int blockID, int[] numDataBytesInBlock, int[] numECBytesInBlock) throws WriterException {
        if (blockID >= numRSBlocks) {
            throw new WriterException("Block ID too large");
        }
        // numRsBlocksInGroup2 = 196 % 5 = 1
        int numRsBlocksInGroup2 = numTotalBytes % numRSBlocks;
        // numRsBlocksInGroup1 = 5 - 1 = 4
        int numRsBlocksInGroup1 = numRSBlocks - numRsBlocksInGroup2;
        // numTotalBytesInGroup1 = 196 / 5 = 39
        int numTotalBytesInGroup1 = numTotalBytes / numRSBlocks;
        // numTotalBytesInGroup2 = 39 + 1 = 40
        int numTotalBytesInGroup2 = numTotalBytesInGroup1 + 1;
        // numDataBytesInGroup1 = 66 / 5 = 13
        int numDataBytesInGroup1 = numDataBytes / numRSBlocks;
        // numDataBytesInGroup2 = 13 + 1 = 14
        int numDataBytesInGroup2 = numDataBytesInGroup1 + 1;
        // numEcBytesInGroup1 = 39 - 13 = 26
        int numEcBytesInGroup1 = numTotalBytesInGroup1 - numDataBytesInGroup1;
        // numEcBytesInGroup2 = 40 - 14 = 26
        int numEcBytesInGroup2 = numTotalBytesInGroup2 - numDataBytesInGroup2;
        // Sanity checks.
        // 26 = 26
        if (numEcBytesInGroup1 != numEcBytesInGroup2) {
            throw new WriterException("EC bytes mismatch");
        }
        // 5 = 4 + 1.
        if (numRSBlocks != numRsBlocksInGroup1 + numRsBlocksInGroup2) {
            throw new WriterException("RS blocks mismatch");
        }
        // 196 = (13 + 26) * 4 + (14 + 26) * 1
        if (numTotalBytes != ((numDataBytesInGroup1 + numEcBytesInGroup1) * numRsBlocksInGroup1) + ((numDataBytesInGroup2 + numEcBytesInGroup2) * numRsBlocksInGroup2)) {
            throw new WriterException("Total bytes mismatch");
        }

        if (blockID < numRsBlocksInGroup1) {
            numDataBytesInBlock[0] = numDataBytesInGroup1;
            numECBytesInBlock[0] = numEcBytesInGroup1;
        } else {
            numDataBytesInBlock[0] = numDataBytesInGroup2;
            numECBytesInBlock[0] = numEcBytesInGroup2;
        }
    }

    /**
     * Interleave "bits" with corresponding error correction bytes. On success,
     * store the result in "result". The interleave rule is complicated. See
     * 8.6 of JISX0510:2004 (p.37) for details.
     *
     * @throws WriterException on any size mismatch between the computed blocks
     *                         and the expected totals
     */
    static void interleaveWithECBytes(BitArray bits, int numTotalBytes, int numDataBytes, int numRSBlocks, BitArray result) throws WriterException {

        // "bits" must have "getNumDataBytes" bytes of data.
        if (bits.getSizeInBytes() != numDataBytes) {
            throw new WriterException("Number of bits and data bytes does not match");
        }

        // Step 1. Divide data bytes into blocks and generate error correction
        // bytes for them. We'll store the divided data bytes blocks and error
        // correction bytes blocks into "blocks".
        int dataBytesOffset = 0;
        int maxNumDataBytes = 0;
        int maxNumEcBytes = 0;

        // Since, we know the number of reedsolmon blocks, we can initialize
        // the vector with the number.
        Collection<BlockPair> blocks = new ArrayList<BlockPair>(numRSBlocks);

        for (int i = 0; i < numRSBlocks; ++i) {
            int[] numDataBytesInBlock = new int[1];
            int[] numEcBytesInBlock = new int[1];
            getNumDataBytesAndNumECBytesForBlockID(numTotalBytes, numDataBytes, numRSBlocks, i, numDataBytesInBlock, numEcBytesInBlock);

            int size = numDataBytesInBlock[0];
            byte[] dataBytes = new byte[size];
            bits.toBytes(8 * dataBytesOffset, dataBytes, 0, size);
            byte[] ecBytes = generateECBytes(dataBytes, numEcBytesInBlock[0]);
            blocks.add(new BlockPair(dataBytes, ecBytes));

            maxNumDataBytes = Math.max(maxNumDataBytes, size);
            maxNumEcBytes = Math.max(maxNumEcBytes, ecBytes.length);
            dataBytesOffset += numDataBytesInBlock[0];
        }
        if (numDataBytes != dataBytesOffset) {
            throw new WriterException("Data bytes does not match offset");
        }

        // First, place data blocks.
        for (int i = 0; i < maxNumDataBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] dataBytes = block.getDataBytes();
                if (i < dataBytes.length) {
                    result.appendBits(dataBytes[i], 8);
                }
            }
        }
        // Then, place error correction blocks.
        for (int i = 0; i < maxNumEcBytes; ++i) {
            for (BlockPair block : blocks) {
                byte[] ecBytes = block.getErrorCorrectionBytes();
                if (i < ecBytes.length) {
                    result.appendBits(ecBytes[i], 8);
                }
            }
        }
        if (numTotalBytes != result.getSizeInBytes()) { // Should be same.
            throw new WriterException("Interleaving error: " + numTotalBytes + " and " + result.getSizeInBytes() + " differ.");
        }
    }

    /**
     * Generates "numEcBytesInBlock" Reed-Solomon error correction bytes for
     * "dataBytes" over GF(256) with the QR Code polynomial.
     */
    static byte[] generateECBytes(byte[] dataBytes, int numEcBytesInBlock) {
        int numDataBytes = dataBytes.length;
        int[] toEncode = new int[numDataBytes + numEcBytesInBlock];
        for (int i = 0; i < numDataBytes; i++) {
            toEncode[i] = dataBytes[i] & 0xFF;
        }
        new ReedSolomonEncoder(GenericGF.QR_CODE_FIELD_256).encode(toEncode, numEcBytesInBlock);

        byte[] ecBytes = new byte[numEcBytesInBlock];
        for (int i = 0; i < numEcBytesInBlock; i++) {
            ecBytes[i] = (byte) toEncode[numDataBytes + i];
        }
        return ecBytes;
    }

    /**
     * Append mode info. On success, store the result in "bits".
     */
    static void appendModeInfo(Mode mode, BitArray bits) {
        bits.appendBits(mode.getBits(), 4);
    }

    /**
     * Append length info. On success, store the result in "bits".
     *
     * @throws WriterException if numLetters overflows the version-dependent
     *                         character-count field width
     */
    static void appendLengthInfo(int numLetters, int version, Mode mode, BitArray bits) throws WriterException {
        int numBits = mode.getCharacterCountBits(Version.getVersionForNumber(version));
        if (numLetters > ((1 << numBits) - 1)) {
            // FIX: original message concatenated without spaces, producing
            // e.g. "45is bigger than40".
            throw new WriterException(numLetters + " is bigger than " + ((1 << numBits) - 1));
        }
        bits.appendBits(numLetters, numBits);
    }

    /**
     * Append "bytes" in "mode" mode (encoding) into "bits". On success, store
     * the result in "bits".
     */
    static void appendBytes(String content, Mode mode, BitArray bits, String encoding) throws WriterException {
        switch (mode) {
            case NUMERIC:
                appendNumericBytes(content, bits);
                break;
            case ALPHANUMERIC:
                appendAlphanumericBytes(content, bits);
                break;
            case BYTE:
                append8BitBytes(content, bits, encoding);
                break;
            case KANJI:
                appendKanjiBytes(content, bits);
                break;
            default:
                throw new WriterException("Invalid mode: " + mode);
        }
    }

    /**
     * Encodes digits in NUMERIC mode: three digits per 10 bits, two per 7,
     * one per 4.
     */
    static void appendNumericBytes(CharSequence content, BitArray bits) {
        int length = content.length();
        int i = 0;
        while (i < length) {
            int num1 = content.charAt(i) - '0';
            if (i + 2 < length) {
                // Encode three numeric letters in ten bits.
                int num2 = content.charAt(i + 1) - '0';
                int num3 = content.charAt(i + 2) - '0';
                bits.appendBits(num1 * 100 + num2 * 10 + num3, 10);
                i += 3;
            } else if (i + 1 < length) {
                // Encode two numeric letters in seven bits.
                int num2 = content.charAt(i + 1) - '0';
                bits.appendBits(num1 * 10 + num2, 7);
                i += 2;
            } else {
                // Encode one numeric letter in four bits.
                bits.appendBits(num1, 4);
                i++;
            }
        }
    }

    /**
     * Encodes characters in ALPHANUMERIC mode: two characters per 11 bits,
     * one per 6.
     *
     * @throws WriterException if a character is not in the alphanumeric table
     */
    static void appendAlphanumericBytes(CharSequence content, BitArray bits) throws WriterException {
        int length = content.length();
        int i = 0;
        while (i < length) {
            int code1 = getAlphanumericCode(content.charAt(i));
            if (code1 == -1) {
                throw new WriterException();
            }
            if (i + 1 < length) {
                int code2 = getAlphanumericCode(content.charAt(i + 1));
                if (code2 == -1) {
                    throw new WriterException();
                }
                // Encode two alphanumeric letters in 11 bits.
                bits.appendBits(code1 * 45 + code2, 11);
                i += 2;
            } else {
                // Encode one alphanumeric letter in six bits.
                bits.appendBits(code1, 6);
                i++;
            }
        }
    }

    /**
     * Encodes the content in BYTE mode using the given charset, 8 bits per byte.
     *
     * @throws WriterException if the charset is unsupported
     */
    static void append8BitBytes(String content, BitArray bits, String encoding) throws WriterException {
        byte[] bytes;
        try {
            bytes = content.getBytes(encoding);
        } catch (UnsupportedEncodingException uee) {
            throw new WriterException(uee.toString());
        }
        for (byte b : bytes) {
            bits.appendBits(b, 8);
        }
    }

    /**
     * Encodes the content in KANJI mode: each double-byte Shift_JIS character
     * is compacted into 13 bits per 8.4.5 of JISX0510:2004.
     *
     * @throws WriterException if the content is not a valid double-byte
     *                         Shift_JIS Kanji sequence
     */
    static void appendKanjiBytes(String content, BitArray bits) throws WriterException {
        byte[] bytes;
        try {
            bytes = content.getBytes("Shift_JIS");
        } catch (UnsupportedEncodingException uee) {
            throw new WriterException(uee.toString());
        }
        int length = bytes.length;
        if (length % 2 != 0) {
            // FIX: an odd byte count cannot be a pure double-byte sequence;
            // without this guard the bytes[i + 1] read below throws
            // ArrayIndexOutOfBoundsException instead of the declared
            // WriterException.
            throw new WriterException("Kanji byte size not even");
        }
        for (int i = 0; i < length; i += 2) {
            int byte1 = bytes[i] & 0xFF;
            int byte2 = bytes[i + 1] & 0xFF;
            int code = (byte1 << 8) | byte2;
            int subtracted = -1;
            // Subtraction offsets per Table 6 of JISX0510:2004.
            if (code >= 0x8140 && code <= 0x9ffc) {
                subtracted = code - 0x8140;
            } else if (code >= 0xe040 && code <= 0xebbf) {
                subtracted = code - 0xc140;
            }
            if (subtracted == -1) {
                throw new WriterException("Invalid byte sequence");
            }
            int encoded = ((subtracted >> 8) * 0xc0) + (subtracted & 0xff);
            bits.appendBits(encoded, 13);
        }
    }

    /** Appends an ECI header (mode indicator + ECI designator) to "bits". */
    private static void appendECI(CharacterSetECI eci, BitArray bits) {
        bits.appendBits(Mode.ECI.getBits(), 4);
        // This is correct for values up to 127, which is all we need now.
        bits.appendBits(eci.getValue(), 8);
    }
}
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.elasticinference.model;

import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * The details of an Elastic Inference Accelerator.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elastic-inference-2017-07-25/ElasticInferenceAccelerator"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ElasticInferenceAccelerator implements Serializable, Cloneable, StructuredPojo {

    /** The health of the Elastic Inference Accelerator. */
    private ElasticInferenceAcceleratorHealth acceleratorHealth;
    /** The type of the Elastic Inference Accelerator. */
    private String acceleratorType;
    /** The ID of the Elastic Inference Accelerator. */
    private String acceleratorId;
    /** The availability zone where the Elastic Inference Accelerator is present. */
    private String availabilityZone;
    /** The ARN of the resource that the Elastic Inference Accelerator is attached to. */
    private String attachedResource;

    /**
     * @param acceleratorHealth
     *        The health of the Elastic Inference Accelerator.
     */
    public void setAcceleratorHealth(ElasticInferenceAcceleratorHealth acceleratorHealth) {
        this.acceleratorHealth = acceleratorHealth;
    }

    /**
     * @return The health of the Elastic Inference Accelerator.
     */
    public ElasticInferenceAcceleratorHealth getAcceleratorHealth() {
        return this.acceleratorHealth;
    }

    /**
     * @param acceleratorHealth
     *        The health of the Elastic Inference Accelerator.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticInferenceAccelerator withAcceleratorHealth(ElasticInferenceAcceleratorHealth acceleratorHealth) {
        setAcceleratorHealth(acceleratorHealth);
        return this;
    }

    /**
     * @param acceleratorType
     *        The type of the Elastic Inference Accelerator.
     */
    public void setAcceleratorType(String acceleratorType) {
        this.acceleratorType = acceleratorType;
    }

    /**
     * @return The type of the Elastic Inference Accelerator.
     */
    public String getAcceleratorType() {
        return this.acceleratorType;
    }

    /**
     * @param acceleratorType
     *        The type of the Elastic Inference Accelerator.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticInferenceAccelerator withAcceleratorType(String acceleratorType) {
        setAcceleratorType(acceleratorType);
        return this;
    }

    /**
     * @param acceleratorId
     *        The ID of the Elastic Inference Accelerator.
     */
    public void setAcceleratorId(String acceleratorId) {
        this.acceleratorId = acceleratorId;
    }

    /**
     * @return The ID of the Elastic Inference Accelerator.
     */
    public String getAcceleratorId() {
        return this.acceleratorId;
    }

    /**
     * @param acceleratorId
     *        The ID of the Elastic Inference Accelerator.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticInferenceAccelerator withAcceleratorId(String acceleratorId) {
        setAcceleratorId(acceleratorId);
        return this;
    }

    /**
     * @param availabilityZone
     *        The availability zone where the Elastic Inference Accelerator is present.
     */
    public void setAvailabilityZone(String availabilityZone) {
        this.availabilityZone = availabilityZone;
    }

    /**
     * @return The availability zone where the Elastic Inference Accelerator is present.
     */
    public String getAvailabilityZone() {
        return this.availabilityZone;
    }

    /**
     * @param availabilityZone
     *        The availability zone where the Elastic Inference Accelerator is present.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticInferenceAccelerator withAvailabilityZone(String availabilityZone) {
        setAvailabilityZone(availabilityZone);
        return this;
    }

    /**
     * @param attachedResource
     *        The ARN of the resource that the Elastic Inference Accelerator is attached to.
     */
    public void setAttachedResource(String attachedResource) {
        this.attachedResource = attachedResource;
    }

    /**
     * @return The ARN of the resource that the Elastic Inference Accelerator is attached to.
     */
    public String getAttachedResource() {
        return this.attachedResource;
    }

    /**
     * @param attachedResource
     *        The ARN of the resource that the Elastic Inference Accelerator is attached to.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ElasticInferenceAccelerator withAttachedResource(String attachedResource) {
        setAttachedResource(attachedResource);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null fields are rendered, matching the generated format:
        // every field but the last is followed by a comma.
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAcceleratorHealth() != null)
            sb.append("AcceleratorHealth: ").append(getAcceleratorHealth()).append(",");
        if (getAcceleratorType() != null)
            sb.append("AcceleratorType: ").append(getAcceleratorType()).append(",");
        if (getAcceleratorId() != null)
            sb.append("AcceleratorId: ").append(getAcceleratorId()).append(",");
        if (getAvailabilityZone() != null)
            sb.append("AvailabilityZone: ").append(getAvailabilityZone()).append(",");
        if (getAttachedResource() != null)
            sb.append("AttachedResource: ").append(getAttachedResource());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ElasticInferenceAccelerator))
            return false;
        ElasticInferenceAccelerator other = (ElasticInferenceAccelerator) obj;
        // Objects.equals has exactly the semantics of the generated
        // null-xor + equals chains it replaces.
        return Objects.equals(getAcceleratorHealth(), other.getAcceleratorHealth())
                && Objects.equals(getAcceleratorType(), other.getAcceleratorType())
                && Objects.equals(getAcceleratorId(), other.getAcceleratorId())
                && Objects.equals(getAvailabilityZone(), other.getAvailabilityZone())
                && Objects.equals(getAttachedResource(), other.getAttachedResource());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the identical 31-based accumulation (initial value
        // 1, null contributes 0) as the hand-rolled loop it replaces, so hash
        // values are unchanged.
        return Objects.hash(getAcceleratorHealth(), getAcceleratorType(), getAcceleratorId(), getAvailabilityZone(), getAttachedResource());
    }

    @Override
    public ElasticInferenceAccelerator clone() {
        try {
            return (ElasticInferenceAccelerator) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.elasticinference.model.transform.ElasticInferenceAcceleratorMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.emm.system.service.api; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.net.ConnectivityManager; import android.net.NetworkInfo; import android.os.AsyncTask; import android.os.BatteryManager; import android.os.Build; import android.util.Log; import org.json.JSONException; import org.json.JSONObject; import org.wso2.emm.system.service.R; import org.wso2.emm.system.service.utils.CommonUtils; import org.wso2.emm.system.service.utils.Constants; import org.wso2.emm.system.service.utils.Preference; import java.io.IOException; import java.net.MalformedURLException; import java.net.SocketTimeoutException; import java.net.URL; import java.net.URLConnection; public class OTADownload implements OTAServerManager.OTAStateChangeListener { private static final String TAG = "OTADownload"; private static final String SI_UNITS_INDEX = "kMGTPE"; private static final String BINARY_UNITS_INDEX = "KMGTPE"; private static final String UPGRADE_AVAILABLE = "upgradeAvailable"; private static final String UPGRADE_VERSION = "version"; private static final String UPGRADE_RELEASE = "release"; private static final String UPGRADE_SIZE = "size"; private static final String UPGRADE_DESCRIPTION = "description"; private Context context; private OTAServerManager 
otaServerManager;

    /**
     * Creates the downloader, registers this object as the OTA server manager's
     * state-change listener and records the download status as REQUEST_PLACED.
     * If the configured server URL is malformed, {@code otaServerManager} stays
     * null and a failure broadcast is sent instead.
     */
    public OTADownload(Context context) {
        this.context = context;
        Preference.putString(context, context.getResources().getString(R.string.upgrade_download_status),
                             Constants.Status.REQUEST_PLACED);
        try {
            otaServerManager = new OTAServerManager(this.context);
            otaServerManager.setStateChangeListener(this);
        } catch (MalformedURLException e) {
            otaServerManager = null;
            String message = "Firmware upgrade URL provided is not valid.";
            // A pure status-check request only broadcasts the failure; an actual
            // upgrade request additionally notifies the agent app.
            if (Preference.getBoolean(context, context.getResources().getString(R.string.
                    firmware_status_check_in_progress))) {
                CommonUtils.sendBroadcast(context, Constants.Operation.GET_FIRMWARE_UPGRADE_PACKAGE_STATUS,
                                          Constants.Code.FAILURE, Constants.Status.MALFORMED_OTA_URL, message);
            } else {
                CommonUtils.sendBroadcast(context, Constants.Operation.UPGRADE_FIRMWARE, Constants.Code.FAILURE,
                                          Constants.Status.MALFORMED_OTA_URL, message);
                CommonUtils.callAgentApp(context, Constants.Operation.
                        FIRMWARE_UPGRADE_FAILURE, Preference.getInt(
                        context, context.getResources().getString(R.string.operation_id)), message);
            }
            Log.e(TAG, "OTA server manager threw exception ..." + e);
        }
    }

    /**
     * Returns the byte count in a human readable format.
     *
     * @param bytes - Bytes to be converted.
     * @param isSI - True if the input is in SI units and False if the input is in binary units.
     * @return - Byte count string.
     */
    public String byteCountToDisplaySize(long bytes, boolean isSI) {
        int unit = isSI ? 1000 : 1024;
        if (bytes < unit) {
            return bytes + " B";
        }
        // floor(log_unit(bytes)) picks the prefix index (K/M/G/... or Ki/Mi/Gi/...).
        int numberToFormat = (int) (Math.log(bytes) / Math.log(unit));
        String prefix = (isSI ? SI_UNITS_INDEX : BINARY_UNITS_INDEX).charAt(numberToFormat - 1) + (isSI ? "" : "i");
        return String.format("%.1f %sB", bytes / Math.pow(unit, numberToFormat), prefix);
    }

    /**
     * Kicks off the OTA flow by asking the server manager to check the
     * available firmware version. No-op when construction failed (bad URL).
     */
    public void startOTA() {
        // If the URL is not correctly provided, server manager will be null
        if (otaServerManager != null) {
            CommonUtils.sendBroadcast(context, Constants.Operation.UPGRADE_FIRMWARE, Constants.Code.SUCCESS,
                                      Constants.Status.REQUEST_PLACED, null);
            //Check in the main service thread
            otaServerManager.startCheckingVersion();
        }
    }

    /**
     * Reads the current battery level from the sticky ACTION_BATTERY_CHANGED
     * broadcast; returns 0 when the intent is unavailable.
     */
    private int getBatteryLevel(Context context) {
        Intent batteryIntent = context.registerReceiver(null, new IntentFilter(
                Intent.ACTION_BATTERY_CHANGED));
        int level = 0;
        if (batteryIntent != null) {
            level = batteryIntent.getIntExtra(BatteryManager.EXTRA_LEVEL, 0);
        }
        return level;
    }

    /**
     * Dispatches state/progress callbacks from the OTA server manager to the
     * matching handler method.
     */
    public void onStateOrProgress(int message, int error, BuildPropParser parser, long info) {
        /* State change will be 0 -> Checked(1) -> Downloading(2) -> Upgrading(3) */
        switch (message) {
            case STATE_IN_CHECKED:
                onStateChecked(error, parser);
                break;
            case STATE_IN_DOWNLOADING:
                onStateDownload(error, info);
                break;
            case STATE_IN_UPGRADING:
                onStateUpgrade(error);
                break;
            case MESSAGE_DOWNLOAD_PROGRESS:
                // NOTE(review): download-progress events are deliberately swallowed
                // here while verify progress is reported — confirm this is intended.
                break;
            case MESSAGE_VERIFY_PROGRESS:
                onProgress(info);
                break;
        }
    }

    /**
     * Returns true when the active network is connected or connecting.
     */
    public boolean checkNetworkOnline() {
        ConnectivityManager connectivityManager =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo info = connectivityManager.getActiveNetworkInfo();
        boolean status = false;
        if (info != null && info.isConnectedOrConnecting()) {
            status = true;
        }
        return status;
    }

    /**
     * Handles completion of the server-side version check.
     * error == 0: either reports "up to date", or (when a newer build exists and
     * the network is up) fetches the package size on a background AsyncTask and
     * then either reports availability (status check) or starts the download,
     * subject to battery level and the automatic-retry preference.
     * Non-zero errors are logged and, for WIFI loss, persisted as WIFI_OFF.
     */
    public void onStateChecked(int error, final BuildPropParser parser) {
        // Operation name depends on whether this flow is a status check or a real upgrade.
        final String operation = Preference.getBoolean(context, context.getResources().getString(R.string.
                firmware_status_check_in_progress)) ? Constants.Operation.GET_FIRMWARE_UPGRADE_PACKAGE_STATUS
                : Constants.Operation.UPGRADE_FIRMWARE;
        if (error == 0) {
            if (!otaServerManager.compareLocalVersionToServer(parser)) {
                Log.i(TAG, "Software is up to date:" + Build.VERSION.RELEASE + ", " + Build.ID);
                JSONObject result = new JSONObject();
                try {
                    result.put(UPGRADE_AVAILABLE, false);
                    if (parser != null) {
                        result.put(UPGRADE_DESCRIPTION, parser.getProp("Software is up to date"));
                    }
                    CommonUtils.sendBroadcast(context, operation, Constants.Code.SUCCESS,
                                              Constants.Status.NO_UPGRADE_FOUND, result.toString());
                } catch (JSONException e) {
                    String message = "Result payload build failed.";
                    CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                              Constants.Status.UPDATE_INFO_NOT_READABLE, message);
                    Log.e(TAG, message + e);
                }
            } else if (checkNetworkOnline()) {
                new AsyncTask<Void, Void, Long>() {
                    // Retrieves the upgrade package size off the UI thread; -1 on failure.
                    protected Long doInBackground(Void... param) {
                        URL url = otaServerManager.getServerConfig().getPackageURL();
                        URLConnection con;
                        try {
                            con = url.openConnection();
                            con.setConnectTimeout(Constants.FIRMWARE_UPGRADE_CONNECTIVITY_TIMEOUT);
                            con.setReadTimeout(Constants.FIRMWARE_UPGRADE_READ_TIMEOUT);
                            return (long) con.getContentLength();
                        } catch (SocketTimeoutException e) {
                            String message = "Connection failure (Socket timeout) when retrieving update package size.";
                            Log.e(TAG, message + e);
                            CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                                      Constants.Status.CONNECTION_FAILED, message);
                            CommonUtils.callAgentApp(context,
                                    Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, null);
                            return (long) -1;
                        } catch (IOException e) {
                            String message = "Connection failure when retrieving update package size.";
                            Log.e(TAG, message + e);
                            CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                                      Constants.Status.CONNECTION_FAILED, message);
                            CommonUtils.callAgentApp(context,
                                    Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, null);
                            return (long) -1;
                        }
                    }

                    // Runs on the UI thread after the size lookup.
                    protected void onPostExecute(Long bytes) {
                        Log.i(TAG, "New release found " + Build.VERSION.RELEASE + ", " + Build.ID);
                        String length = "Unknown";
                        if (bytes > 0) {
                            length = byteCountToDisplaySize(bytes, false);
                        }
                        Log.i(TAG, "version :" + parser.getProp("ro.build.id") + "\n" +
                                   "full_version :" + parser.getProp("ro.build.description") + "\n" +
                                   "size : " + length);
                        //Downloading the new update package if a new version is available.
                        if (Preference.getBoolean(context, context.getResources().getString(R.string.
                                firmware_status_check_in_progress))) {
                            // Status check: only report that an upgrade is available.
                            JSONObject result = new JSONObject();
                            try {
                                result.put(UPGRADE_AVAILABLE, true);
                                result.put(UPGRADE_SIZE, length);
                                result.put(UPGRADE_RELEASE, parser.getNumRelease());
                                result.put(UPGRADE_VERSION, parser.getProp("ro.build.id"));
                                result.put(UPGRADE_DESCRIPTION, parser.getProp("ro.build.description"));
                                CommonUtils.sendBroadcast(context,
                                        Constants.Operation.GET_FIRMWARE_UPGRADE_PACKAGE_STATUS,
                                        Constants.Code.SUCCESS, Constants.Status.SUCCESSFUL, result.toString());
                            } catch (JSONException e) {
                                String message = "Result payload build failed.";
                                CommonUtils.sendBroadcast(context,
                                        Constants.Operation.GET_FIRMWARE_UPGRADE_PACKAGE_STATUS,
                                        Constants.Code.FAILURE,
                                        Constants.Status.OTA_IMAGE_VERIFICATION_FAILED, message);
                                Log.e(TAG, message + e);
                            }
                        } else {
                            if (checkNetworkOnline()) {
                                // Automatic retry defaults to true when the preference key is absent.
                                Boolean isAutomaticRetry = (Preference.hasPreferenceKey(context, context.getResources()
                                        .getString(R.string.firmware_upgrade_automatic_retry)) &&
                                        Preference.getBoolean(context, context.getResources()
                                        .getString(R.string.firmware_upgrade_automatic_retry))) ||
                                        !Preference.hasPreferenceKey(context, context.getResources()
                                        .getString(R.string.firmware_upgrade_automatic_retry));
                                if (getBatteryLevel(context) >= Constants.REQUIRED_BATTERY_LEVEL_TO_FIRMWARE_UPGRADE) {
                                    otaServerManager.startDownloadUpgradePackage(otaServerManager);
                                } else if (isAutomaticRetry) {
                                    // Battery too low but retry allowed: defer rather than fail.
                                    String message = "Upgrade download has been differed due to insufficient battery level.";
                                    Log.w(TAG, message);
                                    Preference.putString(context,
                                            context.getResources().getString(R.string.upgrade_download_status),
                                            Constants.Status.BATTERY_LEVEL_INSUFFICIENT_TO_DOWNLOAD);
                                    CommonUtils.sendBroadcast(context, Constants.Operation.UPGRADE_FIRMWARE,
                                            Constants.Code.PENDING,
                                            Constants.Status.BATTERY_LEVEL_INSUFFICIENT_TO_DOWNLOAD, message);
                                } else {
                                    String message = "Upgrade download has been failed due to insufficient battery level.";
                                    Preference.putString(context,
                                            context.getResources().getString(R.string.upgrade_download_status),
                                            Constants.Status.BATTERY_LEVEL_INSUFFICIENT_TO_DOWNLOAD);
                                    Log.e(TAG, message);
                                    CommonUtils.sendBroadcast(context, Constants.Operation.UPGRADE_FIRMWARE,
                                            Constants.Code.FAILURE,
                                            Constants.Status.BATTERY_LEVEL_INSUFFICIENT_TO_DOWNLOAD, message);
                                    CommonUtils.callAgentApp(context,
                                            Constants.Operation.FIRMWARE_UPGRADE_FAILURE, 0, message);
                                }
                            } else {
                                String message = "Connection failure when starting upgrade download.";
                                Log.e(TAG, message);
                                CommonUtils.sendBroadcast(context, Constants.Operation.UPGRADE_FIRMWARE,
                                        Constants.Code.FAILURE, Constants.Status.NETWORK_UNREACHABLE, message);
                                CommonUtils.callAgentApp(context,
                                        Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, message);
                            }
                        }
                    }
                }.execute();
            } else {
                String message = "Connection failure when starting build prop download.";
                Log.e(TAG, message);
                CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                          Constants.Status.CONNECTION_FAILED, message);
                CommonUtils.callAgentApp(context, Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, null);
            }
        } else if (error == ERROR_WIFI_NOT_AVAILABLE) {
            Preference.putString(context, context.getResources().getString(R.string.upgrade_download_status),
                                 Constants.Status.WIFI_OFF);
            Log.e(TAG, "OTA failed due to WIFI connection failure.");
        } else if (error == ERROR_CANNOT_FIND_SERVER) {
            String message = "OTA failed due to OTA server not accessible.";
            Log.e(TAG, message);
        } else if (error == ERROR_WRITE_FILE_ERROR) {
            String message = "OTA failed due to file write error.";
            Log.e(TAG, message);
        }
    }

    /**
     * Handles completion of the package download. On success starts package
     * verification, waits 10s for it to flag failures, then installs unless the
     * verification-failed flag was set.
     * NOTE(review): the 10s Thread.sleep blocks the calling thread and only
     * works if verification sets the flag within that window — confirm timing.
     */
    public void onStateDownload(int error, Object info) {
        Log.i(TAG, "Printing package information " + info.toString());
        if (error == ERROR_CANNOT_FIND_SERVER) {
            Log.e(TAG, "Error: server does not have an upgrade package.");
        } else if (error == ERROR_WRITE_FILE_ERROR) {
            Log.e(TAG, "OTA failed due to file write error.");
        }

        if (error == 0) {
            // Success download, trying to install package.
            otaServerManager.startVerifyUpgradePackage();
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                Log.e(TAG, "Thread interrupted." + e);
            } finally {
                if (!Preference.getBoolean(context,
                        context.getResources().getString(R.string.verification_failed_flag))) {
                    otaServerManager.startInstallUpgradePackage();
                }
            }
        }
    }

    /**
     * Handles upgrade-stage failures: signature verification failure (also sets
     * the verification-failed flag read by onStateDownload) and install failure.
     */
    public void onStateUpgrade(int error) {
        String operation = Preference.getBoolean(context, context.getResources().getString(R.string.
                firmware_status_check_in_progress)) ? Constants.Operation.GET_FIRMWARE_UPGRADE_PACKAGE_STATUS
                : Constants.Operation.UPGRADE_FIRMWARE;
        if (error == ERROR_PACKAGE_VERIFY_FAILED) {
            String message = "Package verification failed, signature does not match.";
            Log.e(TAG, message);
            Preference.putBoolean(context, context.getResources().getString(R.string.verification_failed_flag), true);
            CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                      Constants.Status.OTA_IMAGE_VERIFICATION_FAILED, message);
            CommonUtils.callAgentApp(context, Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, null);
        } else if (error == ERROR_PACKAGE_INSTALL_FAILED) {
            String message = "Package installation Failed.";
            Log.e(TAG, message);
            CommonUtils.sendBroadcast(context, operation, Constants.Code.FAILURE,
                                      Constants.Status.OTA_IMAGE_INSTALL_FAILED, message);
            CommonUtils.callAgentApp(context, Constants.Operation.FAILED_FIRMWARE_UPGRADE_NOTIFICATION, 0, null);
        }
    }

    /**
     * Logs verification progress (verbose level only).
     */
    public void onProgress(Long progress) {
        Log.v(TAG, "Progress : " + progress);
    }
}
/** * Copyright 2010 CosmoCode GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.cosmocode.palava.services.mail; import java.io.File; import java.io.FileNotFoundException; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.activation.FileDataSource; import org.apache.commons.lang.StringUtils; import org.apache.commons.mail.Email; import org.apache.commons.mail.EmailException; import org.apache.commons.mail.HtmlEmail; import org.apache.commons.mail.SimpleEmail; import org.jdom.Document; import org.jdom.Element; /** * Creates org.apache.commons.mail.Email Objects from XML-Templates. 
* * @deprecated no need to use anymore * @author schoenborn@cosmocode.de */ @Deprecated class EmailFactory { private static final EmailFactory INSTANCE = new EmailFactory(); private static final String CHARSET = "UTF-8"; private static final String EMAIL_SEPARATOR = ";"; protected EmailFactory() { } /* CHECKSTYLE:OFF */ @SuppressWarnings("unchecked") Email build(Document document, Embedder embed) throws EmailException, FileNotFoundException { /* CHECKSTYLE:ON */ final Element root = document.getRootElement(); final List<Element> messages = root.getChildren("message"); if (messages.isEmpty()) throw new IllegalArgumentException("No messages found"); final List<Element> attachments = root.getChildren("attachment"); final Map<ContentType, String> available = new HashMap<ContentType, String>(); for (Element element : messages) { final String type = element.getAttributeValue("type"); final ContentType messageType = StringUtils.equals(type, "html") ? ContentType.HTML : ContentType.PLAIN; if (available.containsKey(messageType)) { throw new IllegalArgumentException("Two messages with the same types have been defined."); } available.put(messageType, element.getText()); } final Email email; if (available.containsKey(ContentType.HTML) || attachments.size() > 0) { final HtmlEmail htmlEmail = new HtmlEmail(); htmlEmail.setCharset(CHARSET); if (embed.hasEmbeddings()) { htmlEmail.setSubType("related"); } else if (attachments.size() > 0) { htmlEmail.setSubType("related"); } else { htmlEmail.setSubType("alternative"); } /** * Add html message */ if (available.containsKey(ContentType.HTML)) { htmlEmail.setHtmlMsg(available.get(ContentType.HTML)); } /** * Add plain text alternative */ if (available.containsKey(ContentType.PLAIN)) { htmlEmail.setTextMsg(available.get(ContentType.PLAIN)); } /** * Embedded binary data */ for (Map.Entry<String, String> entry : embed.getEmbeddings().entrySet()) { final String path = entry.getKey(); final String cid = entry.getValue(); final String name = 
embed.name(path); final File file; if (path.startsWith(File.separator)) { file = new File(path); } else { file = new File(embed.getResourcePath(), path); } if (file.exists()) { htmlEmail.embed(new FileDataSource(file), name, cid); } else { throw new FileNotFoundException(file.getAbsolutePath()); } } /** * Attached binary data */ for (Element attachment : attachments) { final String name = attachment.getAttributeValue("name", ""); final String description = attachment.getAttributeValue("description", ""); final String path = attachment.getAttributeValue("path"); if (path == null) throw new IllegalArgumentException("Attachment path was not set"); File file = new File(path); if (!file.exists()) file = new File(embed.getResourcePath(), path); if (file.exists()) { htmlEmail.attach(new FileDataSource(file), name, description); } else { throw new FileNotFoundException(file.getAbsolutePath()); } } email = htmlEmail; } else if (available.containsKey(ContentType.PLAIN)) { email = new SimpleEmail(); email.setCharset(CHARSET); email.setMsg(available.get(ContentType.PLAIN)); } else { throw new IllegalArgumentException("No valid message found in template."); } final String subject = root.getChildText("subject"); email.setSubject(subject); final Element from = root.getChild("from"); final String fromAddress = from == null ? null : from.getText(); final String fromName = from == null ? 
fromAddress : from.getAttributeValue("name", fromAddress); email.setFrom(fromAddress, fromName); final Element to = root.getChild("to"); if (to != null) { final String toAddress = to.getText(); if (StringUtils.isNotBlank(toAddress) && toAddress.contains(EMAIL_SEPARATOR)) { final String[] toAddresses = toAddress.split(EMAIL_SEPARATOR); for (String address : toAddresses) { email.addTo(address); } } else if (StringUtils.isNotBlank(toAddress)) { final String toName = to.getAttributeValue("name", toAddress); email.addTo(toAddress, toName); } } final Element cc = root.getChild("cc"); if (cc != null) { final String ccAddress = cc.getText(); if (StringUtils.isNotBlank(ccAddress) && ccAddress.contains(EMAIL_SEPARATOR)) { final String[] ccAddresses = ccAddress.split(EMAIL_SEPARATOR); for (String address : ccAddresses) { email.addCc(address); } } else if (StringUtils.isNotBlank(ccAddress)) { final String ccName = cc.getAttributeValue("name", ccAddress); email.addCc(ccAddress, ccName); } } final Element bcc = root.getChild("bcc"); if (bcc != null) { final String bccAddress = bcc.getText(); if (StringUtils.isNotBlank(bccAddress) && bccAddress.contains(EMAIL_SEPARATOR)) { final String[] bccAddresses = bccAddress.split(EMAIL_SEPARATOR); for (String address : bccAddresses) { email.addBcc(address); } } else if (StringUtils.isNotBlank(bccAddress)) { final String bccName = bcc.getAttributeValue("name", bccAddress); email.addBcc(bccAddress, bccName); } } final Element replyTo = root.getChild("replyTo"); if (replyTo != null) { final String replyToAddress = replyTo.getText(); if (StringUtils.isNotBlank(replyToAddress) && replyToAddress.contains(EMAIL_SEPARATOR)) { final String[] replyToAddresses = replyToAddress.split(EMAIL_SEPARATOR); for (String address : replyToAddresses) { email.addReplyTo(address); } } else if (StringUtils.isNotBlank(replyToAddress)) { final String replyToName = replyTo.getAttributeValue("name", replyToAddress); email.addReplyTo(replyToAddress, replyToName); } } 
return email; } public static final EmailFactory getInstance() { return INSTANCE; } }
/*
 *  Player Java Client 3 - OpaqueInterface.java
 *  Copyright (C) 2011 Dustin Webb
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2 of the License, or
 *  (at your option) any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 *
 * $Id: OpaqueInterface.java 145 2011-08-22 14:25:17Z corot $
 *
 */
package javaclient3;

import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;

import javaclient3.structures.PlayerMsgHdr;
import javaclient3.structures.opaque.PlayerOpaqueData;
import javaclient3.xdr.OncRpcException;
import javaclient3.xdr.XdrBufferDecodingStream;
import javaclient3.xdr.XdrBufferEncodingStream;

/**
 * @brief A generic interface for user-defined messages.
 *
 * The opaque interface allows you to send user-specified messages. With
 * this interface a user can send custom commands to their drivers/plugins.
 * @author Dustin Webb
 * @version
 * <ul>
 *      <li>v3.0 - Player 3.0 supported
 * </ul>
 */
public class OpaqueInterface extends PlayerDevice {

    private static final boolean isDebugging = PlayerClient.isDebugging;

    // Logging support
    private Logger logger = Logger.getLogger (OpaqueInterface.class.getName ());

    // Last opaque payload decoded from the server.
    private PlayerOpaqueData podata;
    // Set when a new payload has arrived; cleared by isDataReady().
    private boolean readyPodata = false;

    /**
     * Constructor for OpaqueInterface.
     * @param pc a reference to the PlayerClient object
     */
    public OpaqueInterface (PlayerClient pc) { super (pc); }

    /**
     * Read the data packet.
     * Dispatches on the message subtype; only PLAYER_OPAQUE_DATA_STATE carries
     * a decodable opaque payload. Decoding errors are wrapped as PlayerException.
     */
    public synchronized void readData (PlayerMsgHdr header) {
        super.readData(header);
        try {
            switch (header.getSubtype()) {
                case PLAYER_OPAQUE_DATA_STATE: {
                    readData ();
                    break;
                }
            }
        } catch (IOException e) {
            throw new PlayerException
                ("[Opaque] : Error reading payload: " +
                        e.toString(), e);
        } catch (OncRpcException e) {
            throw new PlayerException
                ("[Opaque] : Error while XDR-decoding payload: " +
                        e.toString(), e);
        }
    }

    /**
     * Returns the opaque data
     * @return a PlayerOpaqueData object containing a byte array.
     */
    public synchronized PlayerOpaqueData getData() { return podata; }

    /**
     * Check if data is available.
     * Note: reading the flag also clears it (one-shot semantics).
     * @return true if ready, false if not ready
     */
    public boolean isDataReady () {
        if (readyPodata) {
            readyPodata = false;
            return true;
        }
        return false;
    }

    /**
     * Command subtype: generic command.
     * @param data The data to send.
     */
    public void command (byte[] data) {
        try {
            sendData (data, PLAYER_OPAQUE_CMD_DATA);
        } catch (OncRpcException e) {
            throw new PlayerException
                ("[Opaque] : Couldn't send data command: " +
                        e.toString(), e);
        } catch (IOException e) {
            throw new PlayerException
                ("[Opaque] : Error while XDR-encoding data command: " +
                        e.toString(), e);
        }
    }

    /**
     * Request/reply: generic request.
     * NOTE(review): this goes through sendData, which always stamps the header
     * with PLAYER_MSGTYPE_CMD — verify requests should not use PLAYER_MSGTYPE_REQ.
     * @param data The data to send.
     */
    public void request (byte[] data) {
        try {
            sendData (data, PLAYER_OPAQUE_REQ_DATA);
        } catch (OncRpcException e) {
            throw new PlayerException
                ("[Opaque] : Couldn't send data request: " +
                        e.toString(), e);
        } catch (IOException e) {
            throw new PlayerException
                ("[Opaque] : Error while XDR-encoding data request: " +
                        e.toString(), e);
        }
    }

    /**
     * Handle acknowledgement response messages
     * Responses to PLAYER_OPAQUE_REQ_DATA carry an opaque payload that is
     * decoded via readData(); other subtypes are only logged when debugging.
     * @param header Player header
     */
    public void handleResponse (PlayerMsgHdr header) {
        try {
            switch (header.getSubtype ()) {
                case PLAYER_OPAQUE_REQ_DATA: {
                    readData ();
                    break;
                }
                default:{
                    if (isDebugging)
                        logger.log (Level.FINEST, "[Opaque][Debug] : " +
                                "Unexpected response " + header.getSubtype () +
                                " of size = " + header.getSize ());
                    break;
                }
            }
        } catch (IOException e) {
            throw new PlayerException
                ("[Opaque] : Error reading payload: " +
                        e.toString(), e);
        } catch (OncRpcException e) {
            throw new PlayerException
                ("[Opaque] : Error while XDR-decoding payload: " +
                        e.toString(), e);
        }
    }

    /**
     * Send user-defined data to the server.
     * Wire format: 8-byte header prefix, then data length written twice (the
     * message's data_count followed by the XDR opaque array count), then the
     * opaque bytes themselves.
     * @param data The data to send.
     * @param subtype Command or request.
     * @throws OncRpcException if an ONC/RPC error occurs.
     * @throws IOException if an I/O error occurs.
     */
    private void sendData (byte[] data, int subtype)
        throws OncRpcException, IOException {
        sendHeader (PLAYER_MSGTYPE_CMD, subtype, 8 + data.length);
        XdrBufferEncodingStream xdr = new XdrBufferEncodingStream (8 + data.length);
        xdr.beginEncoding (null, 0);
        xdr.xdrEncodeInt (data.length);
        xdr.xdrEncodeInt (data.length);
        xdr.xdrEncodeOpaque (data);
        xdr.endEncoding ();
        os.write (xdr.getXdrData(), 0, xdr.getXdrLength());
        xdr.close ();
        os.flush ();
    }

    /**
     * Read user-defined data from the server.
     * Mirrors sendData: reads the leading 8 bytes (data_count plus array_count,
     * of which only data_count is decoded), then reads and decodes exactly
     * data_count opaque bytes and flags the payload as ready.
     * @throws OncRpcException if an ONC/RPC error occurs.
     * @throws IOException if an I/O error occurs.
     */
    private void readData () throws OncRpcException, IOException {
        podata = new PlayerOpaqueData ();
        // Buffer for data_count
        byte[] buffer = new byte[8];
        // Read data_count; skip array_count
        is.readFully (buffer, 0, 8);
        // Begin decoding the XDR buffer
        XdrBufferDecodingStream xdr = new XdrBufferDecodingStream (buffer);
        xdr.beginDecoding ();
        int dataCount = xdr.xdrDecodeInt ();
        xdr.endDecoding ();
        xdr.close ();

        // Buffer for reading data
        buffer = new byte[dataCount];

        // Read generic data
        is.readFully (buffer, 0, dataCount);

        // Begin decoding the XDR buffer
        xdr = new XdrBufferDecodingStream (buffer);
        xdr.beginDecoding ();
        podata.setData (xdr.xdrDecodeOpaque (dataCount));
        xdr.endDecoding ();
        xdr.close ();

        readyPodata = true;
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.engine.impl.persistence.entity;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.activiti.bpmn.model.ActivitiListener;
import org.activiti.engine.ActivitiException;
import org.activiti.engine.ProcessEngineConfiguration;
import org.activiti.engine.delegate.event.ActivitiEventType;
import org.activiti.engine.delegate.event.impl.ActivitiEventBuilder;
import org.activiti.engine.impl.context.Context;
import org.activiti.engine.impl.db.BulkDeleteable;
import org.activiti.engine.impl.interceptor.CommandContext;
import org.activiti.engine.task.DelegationState;
import org.activiti.engine.task.IdentityLink;
import org.activiti.engine.task.IdentityLinkType;

/**
 * Persistent entity behind a user task: holds task state (assignee, priority,
 * due date, delegation, suspension, ...), lazily resolves its execution /
 * process instance, and delegates identity-link management to the
 * IdentityLinkEntityManager of the current command context.
 *
 * @author Tom Baeyens
 * @author Joram Barrez
 * @author Falko Menge
 * @author Tijs Rademakers
 */
public class TaskEntityImpl extends VariableScopeImpl implements TaskEntity, Serializable, BulkDeleteable {

  public static final String DELETE_REASON_COMPLETED = "completed";
  public static final String DELETE_REASON_DELETED = "deleted";

  private static final long serialVersionUID = 1L;

  protected String owner;
  protected int assigneeUpdatedCount; // needed for v5 compatibility
  protected String originalAssignee; // needed for v5 compatibility
  protected String assignee;
  protected DelegationState delegationState;

  protected String parentTaskId;

  protected String name;
  protected String localizedName;
  protected String description;
  protected String localizedDescription;
  protected int priority = DEFAULT_PRIORITY;
  protected Date createTime; // The time when the task has been created
  protected Date dueDate;
  protected int suspensionState = SuspensionState.ACTIVE.getStateCode();
  protected String category;

  // Identity links are loaded lazily; the flag guards a single fetch.
  protected boolean isIdentityLinksInitialized;
  protected List<IdentityLinkEntity> taskIdentityLinkEntities = new ArrayList<IdentityLinkEntity>();

  protected String executionId;
  protected ExecutionEntity execution;

  protected String processInstanceId;
  protected ExecutionEntity processInstance;

  protected String processDefinitionId;

  protected String taskDefinitionKey;
  protected String formKey;

  protected boolean isDeleted;
  protected boolean isCanceled;

  protected String eventName;
  protected ActivitiListener currentActivitiListener;

  protected String tenantId = ProcessEngineConfiguration.NO_TENANT_ID;

  // Variables attached by task/process queries (not the live variable scope).
  protected List<VariableInstanceEntity> queryVariables;

  protected boolean forcedUpdate;

  protected Date claimTime;

  public TaskEntityImpl() {
  }

  /**
   * Snapshot of the fields the persistence layer compares to detect dirty
   * state; null-valued optional fields are omitted from the map.
   */
  public Object getPersistentState() {
    Map<String, Object> persistentState = new HashMap<String, Object>();
    persistentState.put("assignee", this.assignee);
    persistentState.put("owner", this.owner);
    persistentState.put("name", this.name);
    persistentState.put("priority", this.priority);
    if (executionId != null) {
      persistentState.put("executionId", this.executionId);
    }
    if (processDefinitionId != null) {
      persistentState.put("processDefinitionId", this.processDefinitionId);
    }
    if (createTime != null) {
      persistentState.put("createTime", this.createTime);
    }
    if (description != null) {
      persistentState.put("description", this.description);
    }
    if (dueDate != null) {
      persistentState.put("dueDate", this.dueDate);
    }
    if (parentTaskId != null) {
      persistentState.put("parentTaskId", this.parentTaskId);
    }
    if (delegationState != null) {
      persistentState.put("delegationState", this.delegationState);
    }

    persistentState.put("suspensionState", this.suspensionState);

    if (forcedUpdate) {
      persistentState.put("forcedUpdate", Boolean.TRUE);
    }

    if (claimTime != null) {
      persistentState.put("claimTime", this.claimTime);
    }

    return persistentState;
  }

  public int getRevisionNext() {
    return revision + 1;
  }

  // Marks the entity dirty so an update is flushed even without field changes.
  public void forceUpdate() {
    this.forcedUpdate = true;
  }

  // variables //////////////////////////////////////////////////////////////////

  @Override
  protected VariableScopeImpl getParentVariableScope() {
    if (getExecution() != null) {
      return (ExecutionEntityImpl) execution;
    }
    return null;
  }

  @Override
  protected void initializeVariableInstanceBackPointer(VariableInstanceEntity variableInstance) {
    variableInstance.setTaskId(id);
    variableInstance.setExecutionId(executionId);
    variableInstance.setProcessInstanceId(processInstanceId);
  }

  @Override
  protected List<VariableInstanceEntity> loadVariableInstances() {
    return Context.getCommandContext().getVariableInstanceEntityManager().findVariableInstancesByTaskId(id);
  }

  @Override
  protected VariableInstanceEntity createVariableInstance(String variableName, Object value, ExecutionEntity sourceActivityExecution) {
    VariableInstanceEntity result = super.createVariableInstance(variableName, value, sourceActivityExecution);

    // Dispatch event, if needed
    if (Context.getProcessEngineConfiguration() != null && Context.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
      Context
          .getProcessEngineConfiguration()
          .getEventDispatcher()
          .dispatchEvent(
              ActivitiEventBuilder.createVariableEvent(ActivitiEventType.VARIABLE_CREATED, variableName, value, result.getType(), result.getTaskId(), result.getExecutionId(), getProcessInstanceId(),
                  getProcessDefinitionId()));
    }
    return result;
  }

  @Override
  protected void updateVariableInstance(VariableInstanceEntity variableInstance, Object value, ExecutionEntity sourceActivityExecution) {
    super.updateVariableInstance(variableInstance, value, sourceActivityExecution);

    // Dispatch event, if needed
    if (Context.getProcessEngineConfiguration() != null && Context.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
      Context
          .getProcessEngineConfiguration()
          .getEventDispatcher()
          .dispatchEvent(
              ActivitiEventBuilder.createVariableEvent(ActivitiEventType.VARIABLE_UPDATED, variableInstance.getName(), value, variableInstance.getType(), variableInstance.getTaskId(),
                  variableInstance.getExecutionId(), getProcessInstanceId(), getProcessDefinitionId()));
    }
  }

  // execution //////////////////////////////////////////////////////////////////

  // Lazily resolves the owning execution by id through the current command context.
  public ExecutionEntity getExecution() {
    if ((execution == null) && (executionId != null)) {
      this.execution = Context.getCommandContext().getExecutionEntityManager().findById(executionId);
    }
    return execution;
  }

  // task assignment ////////////////////////////////////////////////////////////

  @Override
  public void addCandidateUser(String userId) {
    Context.getCommandContext().getIdentityLinkEntityManager().addCandidateUser(this, userId);
  }

  @Override
  public void addCandidateUsers(Collection<String> candidateUsers) {
    Context.getCommandContext().getIdentityLinkEntityManager().addCandidateUsers(this, candidateUsers);
  }

  @Override
  public void addCandidateGroup(String groupId) {
    Context.getCommandContext().getIdentityLinkEntityManager().addCandidateGroup(this, groupId);
  }

  @Override
  public void addCandidateGroups(Collection<String> candidateGroups) {
    Context.getCommandContext().getIdentityLinkEntityManager().addCandidateGroups(this, candidateGroups);
  }

  @Override
  public void addUserIdentityLink(String userId, String identityLinkType) {
    Context.getCommandContext().getIdentityLinkEntityManager().addUserIdentityLink(this, userId, identityLinkType);
  }

  @Override
  public void addGroupIdentityLink(String groupId, String identityLinkType) {
    Context.getCommandContext().getIdentityLinkEntityManager().addGroupIdentityLink(this, groupId, identityLinkType);
  }

  // Filters the task's identity links down to CANDIDATE entries.
  public Set<IdentityLink> getCandidates() {
    Set<IdentityLink> potentialOwners = new HashSet<IdentityLink>();
    for (IdentityLinkEntity identityLinkEntity : getIdentityLinks()) {
      if (IdentityLinkType.CANDIDATE.equals(identityLinkEntity.getType())) {
        potentialOwners.add(identityLinkEntity);
      }
    }
    return potentialOwners;
  }

  public void deleteCandidateGroup(String groupId) {
    deleteGroupIdentityLink(groupId, IdentityLinkType.CANDIDATE);
  }

  public void deleteCandidateUser(String userId) {
    deleteUserIdentityLink(userId, IdentityLinkType.CANDIDATE);
  }

  public void deleteGroupIdentityLink(String groupId, String identityLinkType) {
    if (groupId != null) {
      Context.getCommandContext().getIdentityLinkEntityManager().deleteIdentityLink(this, null, groupId, identityLinkType);
    }
  }

  public void deleteUserIdentityLink(String userId, String identityLinkType) {
    if (userId != null) {
      Context.getCommandContext().getIdentityLinkEntityManager().deleteIdentityLink(this, userId, null, identityLinkType);
    }
  }

  // Lazily loads identity links once per entity instance.
  public List<IdentityLinkEntity> getIdentityLinks() {
    if (!isIdentityLinksInitialized) {
      taskIdentityLinkEntities = Context.getCommandContext().getIdentityLinkEntityManager().findIdentityLinksByTaskId(id);
      isIdentityLinksInitialized = true;
    }

    return taskIdentityLinkEntities;
  }

  public void setExecutionVariables(Map<String, Object> parameters) {
    if (getExecution() != null) {
      execution.setVariables(parameters);
    }
  }

  public void setName(String taskName) {
    this.name = taskName;
  }

  public void setDescription(String description) {
    this.description = description;
  }

  // Keeps the previous assignee and bumps the update counter so
  // getOriginalAssignee() can report it (v5 compatibility, see below).
  public void setAssignee(String assignee) {
    this.originalAssignee = this.assignee;
    this.assignee = assignee;
    assigneeUpdatedCount++;
  }

  public void setOwner(String owner) {
    this.owner = owner;
  }

  public void setDueDate(Date dueDate) {
    this.dueDate = dueDate;
  }

  public void setPriority(int priority) {
    this.priority = priority;
  }

  public void setCategory(String category) {
    this.category = category;
  }

  public void setParentTaskId(String parentTaskId) {
    this.parentTaskId = parentTaskId;
  }

  public String getFormKey() {
    return formKey;
  }

  public void setFormKey(String formKey) {
    this.formKey = formKey;
  }

  // Override from VariableScopeImpl

  @Override
  protected boolean isActivityIdUsedForDetails() {
    return false;
  }

  // Overridden to avoid fetching *all* variables (as is the case in the super
  // call)

  @Override
  protected VariableInstanceEntity getSpecificVariable(String variableName) {
    CommandContext commandContext = Context.getCommandContext();
    if (commandContext == null) {
      throw new ActivitiException("lazy loading outside command context");
    }
    VariableInstanceEntity variableInstance = commandContext.getVariableInstanceEntityManager().findVariableInstanceByTaskAndName(id, variableName);

    return variableInstance;
  }

  @Override
  protected List<VariableInstanceEntity> getSpecificVariables(Collection<String> variableNames) {
    CommandContext commandContext = Context.getCommandContext();
    if (commandContext == null) {
      throw new ActivitiException("lazy loading outside command context");
    }
    return commandContext.getVariableInstanceEntityManager().findVariableInstancesByTaskAndNames(id, variableNames);
  }

  // regular getters and setters ////////////////////////////////////////////////////////

  public int getRevision() {
    return revision;
  }

  public void setRevision(int revision) {
    this.revision = revision;
  }

  // Returns the localized name when one is set, otherwise the raw name.
  public String getName() {
    if (localizedName != null && localizedName.length() > 0) {
      return localizedName;
    } else {
      return name;
    }
  }

  public String getLocalizedName() {
    return localizedName;
  }

  public void setLocalizedName(String localizedName) {
    this.localizedName = localizedName;
  }

  // Returns the localized description when one is set, otherwise the raw one.
  public String getDescription() {
    if (localizedDescription != null && localizedDescription.length() > 0) {
      return localizedDescription;
    } else {
      return description;
    }
  }

  public String getLocalizedDescription() {
    return localizedDescription;
  }

  public void setLocalizedDescription(String localizedDescription) {
    this.localizedDescription = localizedDescription;
  }

  public Date getDueDate() {
    return dueDate;
  }

  public int getPriority() {
    return priority;
  }

  public Date getCreateTime() {
    return createTime;
  }

  public void setCreateTime(Date createTime) {
    this.createTime = createTime;
  }

  public String getExecutionId() {
    return executionId;
  }

  public String getProcessInstanceId() {
    return processInstanceId;
  }

  public String getProcessDefinitionId() {
    return processDefinitionId;
  }

  public void setProcessDefinitionId(String processDefinitionId) {
    this.processDefinitionId = processDefinitionId;
  }

  public String getAssignee() {
    return assignee;
  }

  public String getOriginalAssignee() {
    // Don't ask. A stupid hack for v5 compatibility
    if (assigneeUpdatedCount > 1) {
      return originalAssignee;
    } else {
      return assignee;
    }
  }

  public String getTaskDefinitionKey() {
    return taskDefinitionKey;
  }

  public void setTaskDefinitionKey(String taskDefinitionKey) {
    this.taskDefinitionKey = taskDefinitionKey;
  }

  public String getEventName() {
    return eventName;
  }

  public void setEventName(String eventName) {
    this.eventName = eventName;
  }

  public ActivitiListener getCurrentActivitiListener() {
    return currentActivitiListener;
  }

  public void setCurrentActivitiListener(ActivitiListener currentActivitiListener) {
    this.currentActivitiListener = currentActivitiListener;
  }

  public void setExecutionId(String executionId) {
    this.executionId = executionId;
  }

  // Lazily resolves the process instance by id through the command context.
  public ExecutionEntity getProcessInstance() {
    if (processInstance == null && processInstanceId != null) {
      processInstance = Context.getCommandContext().getExecutionEntityManager().findById(processInstanceId);
    }
    return processInstance;
  }

  public void setProcessInstance(ExecutionEntity processInstance) {
    this.processInstance = processInstance;
  }

  public void setExecution(ExecutionEntity execution) {
    this.execution = execution;
  }

  public void setProcessInstanceId(String processInstanceId) {
    this.processInstanceId = processInstanceId;
  }

  public String getOwner() {
    return owner;
  }

  public DelegationState getDelegationState() {
    return delegationState;
  }

  public void setDelegationState(DelegationState delegationState) {
    this.delegationState = delegationState;
  }

  public String getDelegationStateString() { //Needed for Activiti 5 compatibility, not exposed in interface
    return (delegationState != null ? delegationState.toString() : null);
  }

  public void setDelegationStateString(String delegationStateString) {
    this.delegationState = (delegationStateString != null ? DelegationState.valueOf(DelegationState.class, delegationStateString) : null);
  }

  public boolean isDeleted() {
    return isDeleted;
  }

  public void setDeleted(boolean isDeleted) {
    this.isDeleted = isDeleted;
  }

  public boolean isCanceled() {
    return isCanceled;
  }

  public void setCanceled(boolean isCanceled) {
    this.isCanceled = isCanceled;
  }

  public String getParentTaskId() {
    return parentTaskId;
  }

  public Map<String, VariableInstanceEntity> getVariableInstanceEntities() {
    ensureVariableInstancesInitialized();
    return variableInstances;
  }

  public int getSuspensionState() {
    return suspensionState;
  }

  public void setSuspensionState(int suspensionState) {
    this.suspensionState = suspensionState;
  }

  public String getCategory() {
    return category;
  }

  public boolean isSuspended() {
    return suspensionState == SuspensionState.SUSPENDED.getStateCode();
  }

  // Task-local query variables: entries with a task id set.
  public Map<String, Object> getTaskLocalVariables() {
    Map<String, Object> variables = new HashMap<String, Object>();
    if (queryVariables != null) {
      for (VariableInstanceEntity variableInstance : queryVariables) {
        if (variableInstance.getId() != null && variableInstance.getTaskId() != null) {
          variables.put(variableInstance.getName(), variableInstance.getValue());
        }
      }
    }
    return variables;
  }

  // Process-scoped query variables: entries without a task id.
  public Map<String, Object> getProcessVariables() {
    Map<String, Object> variables = new HashMap<String, Object>();
    if (queryVariables != null) {
      for (VariableInstanceEntity variableInstance : queryVariables) {
        if (variableInstance.getId() != null && variableInstance.getTaskId() == null) {
          variables.put(variableInstance.getName(), variableInstance.getValue());
        }
      }
    }
    return variables;
  }

  public String getTenantId() {
    return tenantId;
  }

  public void setTenantId(String tenantId) {
    this.tenantId = tenantId;
  }

  public List<VariableInstanceEntity> getQueryVariables() {
    if (queryVariables == null && Context.getCommandContext() != null) {
      queryVariables = new VariableInitializingList();
    }
    return queryVariables;
  }

  public void setQueryVariables(List<VariableInstanceEntity> queryVariables) {
    this.queryVariables = queryVariables;
  }

  public Date getClaimTime() {
    return claimTime;
  }

  public void setClaimTime(Date claimTime) {
    this.claimTime = claimTime;
  }

  public String toString() {
    return "Task[id=" + id + ", name=" + name + "]";
  }
}
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. */ package com.microsoft.azure.management.compute.v2017_12_01.implementation; import com.microsoft.azure.AzureEnvironment; import com.microsoft.azure.AzureResponseBuilder; import com.microsoft.azure.credentials.AzureTokenCredentials; import com.microsoft.azure.management.apigeneration.Beta; import com.microsoft.azure.management.apigeneration.Beta.SinceVersion; import com.microsoft.azure.arm.resources.AzureConfigurable; import com.microsoft.azure.serializer.AzureJacksonAdapter; import com.microsoft.rest.RestClient; import com.microsoft.azure.management.compute.v2017_12_01.Operations; import com.microsoft.azure.management.compute.v2017_12_01.AvailabilitySets; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineExtensionImages; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineExtensions; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachines; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineImages; import com.microsoft.azure.management.compute.v2017_12_01.Usages; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineSizes; import com.microsoft.azure.management.compute.v2017_12_01.Images; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineScaleSets; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineScaleSetExtensions; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineScaleSetRollingUpgrades; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineScaleSetVMs; import com.microsoft.azure.management.compute.v2017_12_01.LogAnalytics; import com.microsoft.azure.management.compute.v2017_12_01.VirtualMachineRunCommands; import 
com.microsoft.azure.arm.resources.implementation.AzureConfigurableCoreImpl; // NOTE(review): completes the import statement begun on the previous line.
import com.microsoft.azure.arm.resources.implementation.ManagerCore;

/**
 * Entry point to Azure Compute resource management.
 */
public final class ComputeManager extends ManagerCore<ComputeManager, ComputeManagementClientImpl> {
    // Lazily-initialized entry points, one per Compute resource collection.
    // NOTE(review): the lazy initialization in the getters below is not
    // synchronized; presumably the manager is configured from a single thread
    // (typical for this AutoRest-generated pattern) — confirm before sharing
    // an instance across threads prior to first use.
    private Operations operations;
    private AvailabilitySets availabilitySets;
    private VirtualMachineExtensionImages virtualMachineExtensionImages;
    private VirtualMachineExtensions virtualMachineExtensions;
    private VirtualMachines virtualMachines;
    private VirtualMachineImages virtualMachineImages;
    private Usages usages;
    private VirtualMachineSizes virtualMachineSizes;
    private Images images;
    private VirtualMachineScaleSets virtualMachineScaleSets;
    private VirtualMachineScaleSetExtensions virtualMachineScaleSetExtensions;
    private VirtualMachineScaleSetRollingUpgrades virtualMachineScaleSetRollingUpgrades;
    private VirtualMachineScaleSetVMs virtualMachineScaleSetVMs;
    private LogAnalytics logAnalytics;
    private VirtualMachineRunCommands virtualMachineRunCommands;

    /**
     * Get a Configurable instance that can be used to create ComputeManager with optional configuration.
     *
     * @return the instance allowing configurations
     */
    public static Configurable configure() {
        return new ComputeManager.ConfigurableImpl();
    }

    /**
     * Creates an instance of ComputeManager that exposes Compute resource management API entry points.
     *
     * @param credentials the credentials to use
     * @param subscriptionId the subscription UUID
     * @return the ComputeManager
     */
    public static ComputeManager authenticate(AzureTokenCredentials credentials, String subscriptionId) {
        // Builds a default RestClient targeting the ARM endpoint of the credentials' cloud.
        return new ComputeManager(new RestClient.Builder()
            .withBaseUrl(credentials.environment(), AzureEnvironment.Endpoint.RESOURCE_MANAGER)
            .withCredentials(credentials)
            .withSerializerAdapter(new AzureJacksonAdapter())
            .withResponseBuilderFactory(new AzureResponseBuilder.Factory())
            .build(), subscriptionId);
    }

    /**
     * Creates an instance of ComputeManager that exposes Compute resource management API entry points.
     *
     * @param restClient the RestClient to be used for API calls.
     * @param subscriptionId the subscription UUID
     * @return the ComputeManager
     */
    public static ComputeManager authenticate(RestClient restClient, String subscriptionId) {
        return new ComputeManager(restClient, subscriptionId);
    }

    /**
     * The interface allowing configurations to be set.
     */
    public interface Configurable extends AzureConfigurable<Configurable> {
        /**
         * Creates an instance of ComputeManager that exposes Compute management API entry points.
         *
         * @param credentials the credentials to use
         * @param subscriptionId the subscription UUID
         * @return the interface exposing Compute management API entry points that work across subscriptions
         */
        ComputeManager authenticate(AzureTokenCredentials credentials, String subscriptionId);
    }

    /**
     * @return Entry point to manage Operations.
     */
    public Operations operations() {
        if (this.operations == null) {
            this.operations = new OperationsImpl(this);
        }
        return this.operations;
    }

    /**
     * @return Entry point to manage AvailabilitySets.
     */
    public AvailabilitySets availabilitySets() {
        if (this.availabilitySets == null) {
            this.availabilitySets = new AvailabilitySetsImpl(this);
        }
        return this.availabilitySets;
    }

    /**
     * @return Entry point to manage VirtualMachineExtensionImages.
     */
    public VirtualMachineExtensionImages virtualMachineExtensionImages() {
        if (this.virtualMachineExtensionImages == null) {
            this.virtualMachineExtensionImages = new VirtualMachineExtensionImagesImpl(this);
        }
        return this.virtualMachineExtensionImages;
    }

    /**
     * @return Entry point to manage VirtualMachineExtensions.
     */
    public VirtualMachineExtensions virtualMachineExtensions() {
        if (this.virtualMachineExtensions == null) {
            this.virtualMachineExtensions = new VirtualMachineExtensionsImpl(this);
        }
        return this.virtualMachineExtensions;
    }

    /**
     * @return Entry point to manage VirtualMachines.
     */
    public VirtualMachines virtualMachines() {
        if (this.virtualMachines == null) {
            this.virtualMachines = new VirtualMachinesImpl(this);
        }
        return this.virtualMachines;
    }

    /**
     * @return Entry point to manage VirtualMachineImages.
     */
    public VirtualMachineImages virtualMachineImages() {
        if (this.virtualMachineImages == null) {
            this.virtualMachineImages = new VirtualMachineImagesImpl(this);
        }
        return this.virtualMachineImages;
    }

    /**
     * @return Entry point to manage Usages.
     */
    public Usages usages() {
        if (this.usages == null) {
            this.usages = new UsagesImpl(this);
        }
        return this.usages;
    }

    /**
     * @return Entry point to manage VirtualMachineSizes.
     */
    public VirtualMachineSizes virtualMachineSizes() {
        if (this.virtualMachineSizes == null) {
            this.virtualMachineSizes = new VirtualMachineSizesImpl(this);
        }
        return this.virtualMachineSizes;
    }

    /**
     * @return Entry point to manage Images.
     */
    public Images images() {
        if (this.images == null) {
            this.images = new ImagesImpl(this);
        }
        return this.images;
    }

    /**
     * @return Entry point to manage VirtualMachineScaleSets.
     */
    public VirtualMachineScaleSets virtualMachineScaleSets() {
        if (this.virtualMachineScaleSets == null) {
            this.virtualMachineScaleSets = new VirtualMachineScaleSetsImpl(this);
        }
        return this.virtualMachineScaleSets;
    }

    /**
     * @return Entry point to manage VirtualMachineScaleSetExtensions.
     */
    public VirtualMachineScaleSetExtensions virtualMachineScaleSetExtensions() {
        if (this.virtualMachineScaleSetExtensions == null) {
            this.virtualMachineScaleSetExtensions = new VirtualMachineScaleSetExtensionsImpl(this);
        }
        return this.virtualMachineScaleSetExtensions;
    }

    /**
     * @return Entry point to manage VirtualMachineScaleSetRollingUpgrades.
     */
    public VirtualMachineScaleSetRollingUpgrades virtualMachineScaleSetRollingUpgrades() {
        if (this.virtualMachineScaleSetRollingUpgrades == null) {
            this.virtualMachineScaleSetRollingUpgrades = new VirtualMachineScaleSetRollingUpgradesImpl(this);
        }
        return this.virtualMachineScaleSetRollingUpgrades;
    }

    /**
     * @return Entry point to manage VirtualMachineScaleSetVMs.
     */
    public VirtualMachineScaleSetVMs virtualMachineScaleSetVMs() {
        if (this.virtualMachineScaleSetVMs == null) {
            this.virtualMachineScaleSetVMs = new VirtualMachineScaleSetVMsImpl(this);
        }
        return this.virtualMachineScaleSetVMs;
    }

    /**
     * @return Entry point to manage LogAnalytics.
     */
    public LogAnalytics logAnalytics() {
        if (this.logAnalytics == null) {
            this.logAnalytics = new LogAnalyticsImpl(this);
        }
        return this.logAnalytics;
    }

    /**
     * @return Entry point to manage VirtualMachineRunCommands.
     */
    public VirtualMachineRunCommands virtualMachineRunCommands() {
        if (this.virtualMachineRunCommands == null) {
            this.virtualMachineRunCommands = new VirtualMachineRunCommandsImpl(this);
        }
        return this.virtualMachineRunCommands;
    }

    /**
     * The implementation for Configurable interface.
     */
    private static final class ConfigurableImpl extends AzureConfigurableCoreImpl<Configurable> implements Configurable {
        public ComputeManager authenticate(AzureTokenCredentials credentials, String subscriptionId) {
            return ComputeManager.authenticate(buildRestClient(credentials), subscriptionId);
        }
    }

    private ComputeManager(RestClient restClient, String subscriptionId) {
        super(
            restClient,
            subscriptionId,
            new ComputeManagementClientImpl(restClient).withSubscriptionId(subscriptionId));
    }
}
/* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.epoll; import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufAllocator; import io.netty.buffer.CompositeByteBuf; import io.netty.channel.AddressedEnvelope; import io.netty.channel.ChannelFuture; import io.netty.channel.ChannelMetadata; import io.netty.channel.ChannelOption; import io.netty.channel.ChannelOutboundBuffer; import io.netty.channel.ChannelPipeline; import io.netty.channel.ChannelPromise; import io.netty.channel.DefaultAddressedEnvelope; import io.netty.channel.RecvByteBufAllocator; import io.netty.channel.socket.DatagramChannel; import io.netty.channel.socket.DatagramChannelConfig; import io.netty.channel.socket.DatagramPacket; import io.netty.channel.unix.FileDescriptor; import io.netty.util.internal.PlatformDependent; import io.netty.util.internal.StringUtil; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.NetworkInterface; import java.net.SocketAddress; import java.net.SocketException; import java.nio.ByteBuffer; import java.nio.channels.NotYetConnectedException; import java.util.ArrayList; import java.util.List; /** * {@link DatagramChannel} implementation that uses linux EPOLL Edge-Triggered Mode for * maximal performance. 
 */
public final class EpollDatagramChannel extends AbstractEpollChannel implements DatagramChannel {
    // Metadata advertising that this channel supports disconnect().
    private static final ChannelMetadata METADATA = new ChannelMetadata(true);
    // Suffix appended to the "unsupported message type" error in filterOutboundMessage().
    private static final String EXPECTED_TYPES =
            " (expected: " + StringUtil.simpleClassName(DatagramPacket.class) + ", " +
            StringUtil.simpleClassName(AddressedEnvelope.class) + '<' +
            StringUtil.simpleClassName(ByteBuf.class) + ", " +
            StringUtil.simpleClassName(InetSocketAddress.class) + ">, " +
            StringUtil.simpleClassName(ByteBuf.class) + ')';

    private volatile InetSocketAddress local;
    private volatile InetSocketAddress remote;
    private volatile boolean connected;
    private final EpollDatagramChannelConfig config;

    public EpollDatagramChannel() {
        super(Native.socketDgramFd(), Native.EPOLLIN);
        config = new EpollDatagramChannelConfig(this);
    }

    /**
     * Create a new {@link EpollDatagramChannel} from the given {@link FileDescriptor}.
     */
    public EpollDatagramChannel(FileDescriptor fd) {
        super(null, fd, Native.EPOLLIN, true);
        config = new EpollDatagramChannelConfig(this);

        // As we create an EpollDatagramChannel from a FileDescriptor we should try to obtain the remote and local
        // address from it. This is needed as the FileDescriptor may be bound already.
        local = Native.localAddress(fd.intValue());
    }

    @Override
    public InetSocketAddress remoteAddress() {
        return (InetSocketAddress) super.remoteAddress();
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    @SuppressWarnings("deprecation")
    public boolean isActive() {
        return fd().isOpen() &&
               (config.getOption(ChannelOption.DATAGRAM_CHANNEL_ACTIVE_ON_REGISTRATION) && isRegistered()
                || active);
    }

    @Override
    public boolean isConnected() {
        return connected;
    }

    // ---- Multicast API ----
    // All join/leave/block variants ultimately fail the promise with
    // UnsupportedOperationException: multicast is not implemented for this transport.

    @Override
    public ChannelFuture joinGroup(InetAddress multicastAddress) {
        return joinGroup(multicastAddress, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(InetAddress multicastAddress, ChannelPromise promise) {
        try {
            return joinGroup(
                    multicastAddress,
                    NetworkInterface.getByInetAddress(localAddress().getAddress()),
                    null, promise);
        } catch (SocketException e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    public ChannelFuture joinGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
        return joinGroup(multicastAddress, networkInterface, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface,
            ChannelPromise promise) {
        return joinGroup(multicastAddress.getAddress(), networkInterface, null, promise);
    }

    @Override
    public ChannelFuture joinGroup(
            InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
        return joinGroup(multicastAddress, networkInterface, source, newPromise());
    }

    @Override
    public ChannelFuture joinGroup(
            final InetAddress multicastAddress, final NetworkInterface networkInterface,
            final InetAddress source, final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture leaveGroup(InetAddress multicastAddress) {
        return leaveGroup(multicastAddress, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(InetAddress multicastAddress, ChannelPromise promise) {
        try {
            return leaveGroup(
                    multicastAddress, NetworkInterface.getByInetAddress(localAddress().getAddress()), null, promise);
        } catch (SocketException e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    public ChannelFuture leaveGroup(
            InetSocketAddress multicastAddress, NetworkInterface networkInterface) {
        return leaveGroup(multicastAddress, networkInterface, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(
            InetSocketAddress multicastAddress,
            NetworkInterface networkInterface, ChannelPromise promise) {
        return leaveGroup(multicastAddress.getAddress(), networkInterface, null, promise);
    }

    @Override
    public ChannelFuture leaveGroup(
            InetAddress multicastAddress, NetworkInterface networkInterface, InetAddress source) {
        return leaveGroup(multicastAddress, networkInterface, source, newPromise());
    }

    @Override
    public ChannelFuture leaveGroup(
            final InetAddress multicastAddress, final NetworkInterface networkInterface, final InetAddress source,
            final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture block(
            InetAddress multicastAddress, NetworkInterface networkInterface,
            InetAddress sourceToBlock) {
        return block(multicastAddress, networkInterface, sourceToBlock, newPromise());
    }

    @Override
    public ChannelFuture block(
            final InetAddress multicastAddress, final NetworkInterface networkInterface,
            final InetAddress sourceToBlock, final ChannelPromise promise) {
        if (multicastAddress == null) {
            throw new NullPointerException("multicastAddress");
        }
        if (sourceToBlock == null) {
            throw new NullPointerException("sourceToBlock");
        }
        if (networkInterface == null) {
            throw new NullPointerException("networkInterface");
        }
        promise.setFailure(new UnsupportedOperationException("Multicast not supported"));
        return promise;
    }

    @Override
    public ChannelFuture block(InetAddress multicastAddress, InetAddress sourceToBlock) {
        return block(multicastAddress, sourceToBlock, newPromise());
    }

    @Override
    public ChannelFuture block(
            InetAddress multicastAddress, InetAddress sourceToBlock, ChannelPromise promise) {
        try {
            return block(
                    multicastAddress,
                    NetworkInterface.getByInetAddress(localAddress().getAddress()),
                    sourceToBlock, promise);
        } catch (Throwable e) {
            promise.setFailure(e);
        }
        return promise;
    }

    @Override
    protected AbstractEpollUnsafe newUnsafe() {
        return new EpollDatagramChannelUnsafe();
    }

    @Override
    protected InetSocketAddress localAddress0() {
        return local;
    }

    @Override
    protected InetSocketAddress remoteAddress0() {
        return remote;
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        InetSocketAddress addr = (InetSocketAddress) localAddress;
        checkResolvable(addr);
        int fd = fd().intValue();
        Native.bind(fd, addr);
        // Re-read the local address: the kernel may have assigned an ephemeral port.
        local = Native.localAddress(fd);
        active = true;
    }

    // Flushes the outbound buffer. Batches packets via sendmmsg(2) when the libc
    // supports it, otherwise falls back to per-message writes with a spin count.
    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        for (;;) {
            Object msg = in.current();
            if (msg == null) {
                // Wrote all messages.
                clearFlag(Native.EPOLLOUT);
                break;
            }

            try {
                // Check if sendmmsg(...) is supported which is only the case for GLIBC 2.14+
                if (Native.IS_SUPPORTING_SENDMMSG && in.size() > 1) {
                    NativeDatagramPacketArray array = NativeDatagramPacketArray.getInstance(in);
                    int cnt = array.count();

                    if (cnt >= 1) {
                        // Try to use gathering writes via sendmmsg(...) syscall.
                        int offset = 0;
                        NativeDatagramPacketArray.NativeDatagramPacket[] packets = array.packets();

                        while (cnt > 0) {
                            int send = Native.sendmmsg(fd().intValue(), packets, offset, cnt);
                            if (send == 0) {
                                // Did not write all messages.
                                setFlag(Native.EPOLLOUT);
                                return;
                            }
                            for (int i = 0; i < send; i++) {
                                in.remove();
                            }
                            cnt -= send;
                            offset += send;
                        }
                        continue;
                    }
                }
                boolean done = false;
                for (int i = config().getWriteSpinCount() - 1; i >= 0; i--) {
                    if (doWriteMessage(msg)) {
                        done = true;
                        break;
                    }
                }

                if (done) {
                    in.remove();
                } else {
                    // Did not write all messages.
                    setFlag(Native.EPOLLOUT);
                    break;
                }
            } catch (IOException e) {
                // Continue on write error as a DatagramChannel can write to multiple remote peers
                //
                // See https://github.com/netty/netty/issues/2665
                in.remove(e);
            }
        }
    }

    // Writes a single message; returns true when the datagram was written
    // (writtenBytes > 0), false to retry. Picks the fastest native path based
    // on the buffer's memory layout.
    private boolean doWriteMessage(Object msg) throws Exception {
        final ByteBuf data;
        InetSocketAddress remoteAddress;
        if (msg instanceof AddressedEnvelope) {
            @SuppressWarnings("unchecked")
            AddressedEnvelope<ByteBuf, InetSocketAddress> envelope =
                    (AddressedEnvelope<ByteBuf, InetSocketAddress>) msg;
            data = envelope.content();
            remoteAddress = envelope.recipient();
        } else {
            data = (ByteBuf) msg;
            remoteAddress = null;
        }

        final int dataLen = data.readableBytes();
        if (dataLen == 0) {
            // Empty datagrams are treated as written.
            return true;
        }

        if (remoteAddress == null) {
            remoteAddress = remote;
            if (remoteAddress == null) {
                throw new NotYetConnectedException();
            }
        }

        final int writtenBytes;
        if (data.hasMemoryAddress()) {
            long memoryAddress = data.memoryAddress();
            writtenBytes = Native.sendToAddress(fd().intValue(), memoryAddress, data.readerIndex(), data.writerIndex(),
                    remoteAddress.getAddress(), remoteAddress.getPort());
        } else if (data instanceof CompositeByteBuf) {
            IovArray array = IovArrayThreadLocal.get((CompositeByteBuf) data);
            int cnt = array.count();
            assert cnt != 0;
            writtenBytes = Native.sendToAddresses(fd().intValue(), array.memoryAddress(0),
                    cnt, remoteAddress.getAddress(), remoteAddress.getPort());
        } else  {
            ByteBuffer nioData = data.internalNioBuffer(data.readerIndex(), data.readableBytes());
            writtenBytes = Native.sendTo(fd().intValue(), nioData, nioData.position(), nioData.limit(),
                    remoteAddress.getAddress(), remoteAddress.getPort());
        }

        return writtenBytes > 0;
    }

    // Converts/copies outbound messages so the write path only ever sees buffers
    // with a native memory address (or composites within the iov limit).
    @Override
    protected Object filterOutboundMessage(Object msg) {
        if (msg instanceof DatagramPacket) {
            DatagramPacket packet = (DatagramPacket) msg;
            ByteBuf content = packet.content();
            if (content.hasMemoryAddress()) {
                return msg;
            }

            if (content.isDirect() && content instanceof CompositeByteBuf) {
                // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                // in the CompositeByteBuf are backed by a memoryAddress.
                CompositeByteBuf comp = (CompositeByteBuf) content;
                if (comp.isDirect() && comp.nioBufferCount() <= Native.IOV_MAX) {
                    return msg;
                }
            }
            // We can only handle direct buffers so we need to copy if a non direct is
            // passed to write.
            return new DatagramPacket(newDirectBuffer(packet, content), packet.recipient());
        }

        if (msg instanceof ByteBuf) {
            ByteBuf buf = (ByteBuf) msg;
            if (!buf.hasMemoryAddress() && (PlatformDependent.hasUnsafe() || !buf.isDirect())) {
                if (buf instanceof CompositeByteBuf) {
                    // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                    // in the CompositeByteBuf are backed by a memoryAddress.
                    CompositeByteBuf comp = (CompositeByteBuf) buf;
                    if (!comp.isDirect() || comp.nioBufferCount() > Native.IOV_MAX) {
                        // more than 1024 buffers for gathering writes so just do a memory copy.
                        buf = newDirectBuffer(buf);
                        assert buf.hasMemoryAddress();
                    }
                } else {
                    // We can only handle buffers with memory address so we need to copy if a non direct is
                    // passed to write.
                    buf = newDirectBuffer(buf);
                    assert buf.hasMemoryAddress();
                }
            }
            return buf;
        }

        if (msg instanceof AddressedEnvelope) {
            @SuppressWarnings("unchecked")
            AddressedEnvelope<Object, SocketAddress> e = (AddressedEnvelope<Object, SocketAddress>) msg;
            if (e.content() instanceof ByteBuf &&
                (e.recipient() == null || e.recipient() instanceof InetSocketAddress)) {

                ByteBuf content = (ByteBuf) e.content();
                if (content.hasMemoryAddress()) {
                    return e;
                }
                if (content instanceof CompositeByteBuf) {
                    // Special handling of CompositeByteBuf to reduce memory copies if some of the Components
                    // in the CompositeByteBuf are backed by a memoryAddress.
                    CompositeByteBuf comp = (CompositeByteBuf) content;
                    if (comp.isDirect() && comp.nioBufferCount() <= Native.IOV_MAX) {
                        return e;
                    }
                }
                // We can only handle direct buffers so we need to copy if a non direct is
                // passed to write.
                return new DefaultAddressedEnvelope<ByteBuf, InetSocketAddress>(
                        newDirectBuffer(e, content), (InetSocketAddress) e.recipient());
            }
        }

        throw new UnsupportedOperationException(
                "unsupported message type: " + StringUtil.simpleClassName(msg) + EXPECTED_TYPES);
    }

    @Override
    public EpollDatagramChannelConfig config() {
        return config;
    }

    @Override
    protected void doDisconnect() throws Exception {
        connected = false;
    }

    final class EpollDatagramChannelUnsafe extends AbstractEpollUnsafe {
        // Reused per-read batch of inbound DatagramPackets, drained into the pipeline.
        private final List<Object> readBuf = new ArrayList<Object>();

        @Override
        public void connect(SocketAddress remote, SocketAddress local, ChannelPromise channelPromise) {
            boolean success = false;
            try {
                try {
                    boolean wasActive = isActive();
                    InetSocketAddress remoteAddress = (InetSocketAddress) remote;
                    if (local != null) {
                        InetSocketAddress localAddress = (InetSocketAddress) local;
                        doBind(localAddress);
                    }

                    checkResolvable(remoteAddress);
                    EpollDatagramChannel.this.remote = remoteAddress;
                    EpollDatagramChannel.this.local = Native.localAddress(fd().intValue());
                    success = true;

                    // Regardless if the connection attempt was cancelled, channelActive() event should be triggered,
                    // because what happened is what happened.
                    if (!wasActive && isActive()) {
                        pipeline().fireChannelActive();
                    }
                } finally {
                    if (!success) {
                        doClose();
                    } else {
                        channelPromise.setSuccess();
                        connected = true;
                    }
                }
            } catch (Throwable cause) {
                channelPromise.setFailure(cause);
            }
        }

        @Override
        protected EpollRecvByteAllocatorHandle newEpollHandle(RecvByteBufAllocator.Handle handle) {
            return new EpollRecvByteAllocatorMessageHandle(handle, isFlagSet(Native.EPOLLET));
        }

        // Reads datagrams until the allocator says stop or the socket is drained.
        // Under epoll ET, per-read exceptions are fired immediately and reading
        // continues, because the socket must be fully drained in edge-triggered mode.
        @Override
        void epollInReady() {
            assert eventLoop().inEventLoop();
            DatagramChannelConfig config = config();
            boolean edgeTriggered = isFlagSet(Native.EPOLLET);
            if (!readPending && !edgeTriggered && !config.isAutoRead()) {
                // ChannelConfig.setAutoRead(false) was called in the meantime
                clearEpollIn0();
                return;
            }
            final ChannelPipeline pipeline = pipeline();
            final ByteBufAllocator allocator = config.getAllocator();
            final EpollRecvByteAllocatorHandle allocHandle = recvBufAllocHandle();
            allocHandle.reset(config);

            Throwable exception = null;
            try {
                do {
                    ByteBuf data = null;
                    try {
                        data = allocHandle.allocate(allocator);
                        allocHandle.attemptedBytesRead(data.writableBytes());
                        final DatagramSocketAddress remoteAddress;
                        if (data.hasMemoryAddress()) {
                            // has a memory address so use optimized call
                            remoteAddress = Native.recvFromAddress(
                                    fd().intValue(), data.memoryAddress(), data.writerIndex(), data.capacity());
                        } else {
                            ByteBuffer nioData = data.internalNioBuffer(data.writerIndex(), data.writableBytes());
                            remoteAddress = Native.recvFrom(
                                    fd().intValue(), nioData, nioData.position(), nioData.limit());
                        }

                        if (remoteAddress == null) {
                            // Nothing left to read.
                            data.release();
                            data = null;
                            break;
                        }

                        allocHandle.incMessagesRead(1);
                        allocHandle.lastBytesRead(remoteAddress.receivedAmount);
                        data.writerIndex(data.writerIndex() + allocHandle.lastBytesRead());

                        readPending = false;
                        readBuf.add(new DatagramPacket(data, (InetSocketAddress) localAddress(), remoteAddress));
                        data = null;
                    } catch (Throwable t) {
                        // Keep things from getting out of hand: release the partially-filled buffer.
                        if (data != null) {
                            data.release();
                            data = null;
                        }
                        if (edgeTriggered) {
                            // We do not break from the loop here and remember the last exception,
                            // because we need to consume everything from the socket used with epoll ET.
                            pipeline.fireExceptionCaught(t);
                        } else {
                            exception = t;
                            break;
                        }
                    }
                } while (allocHandle.continueReading());

                int size = readBuf.size();
                for (int i = 0; i < size; i ++) {
                    pipeline.fireChannelRead(readBuf.get(i));
                }
                readBuf.clear();
                allocHandle.readComplete();
                pipeline.fireChannelReadComplete();

                if (exception != null) {
                    pipeline.fireExceptionCaught(exception);
                }
            } finally {
                // Check if there is a readPending which was not processed yet.
                // This could be for two reasons:
                // * The user called Channel.read() or ChannelHandlerContext.read() in channelRead(...) method
                // * The user called Channel.read() or ChannelHandlerContext.read() in channelReadComplete(...) method
                //
                // See https://github.com/netty/netty/issues/2254
                if (!readPending && !config.isAutoRead()) {
                    clearEpollIn();
                }
            }
        }
    }

    /**
     * Act as special {@link InetSocketAddress} to be able to easily pass all needed data from JNI without the need
     * to create more objects then needed.
     */
    static final class DatagramSocketAddress extends InetSocketAddress {
        private static final long serialVersionUID = 1348596211215015739L;

        // holds the amount of received bytes
        final int receivedAmount;

        DatagramSocketAddress(String addr, int port, int receivedAmount) {
            super(addr, port);
            this.receivedAmount = receivedAmount;
        }
    }
}
package com.neustar.ultraservice.schema.v01; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for PoolData complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="PoolData"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;/sequence> * &lt;attribute name="PoolName" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="LBPoolId" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="PoolId" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="PoolType" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="PoolRecordType" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="Bleid" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="PoolStatus" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="PoolDName" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="FailOver" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="Probing" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="MinActiveServers" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="MaxActiveServers" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="ResponseMethod" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="MaxResponse" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="description" use="required" 
type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="configured" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "PoolData") public class PoolData { @XmlAttribute(name = "PoolName", required = true) protected String poolName; @XmlAttribute(name = "LBPoolId", required = true) protected String lbPoolId; @XmlAttribute(name = "PoolId", required = true) protected String poolId; @XmlAttribute(name = "PoolType", required = true) protected String poolType; @XmlAttribute(name = "PoolRecordType", required = true) protected String poolRecordType; @XmlAttribute(name = "Bleid", required = true) protected String bleid; @XmlAttribute(name = "PoolStatus", required = true) protected String poolStatus; @XmlAttribute(name = "PoolDName", required = true) protected String poolDName; @XmlAttribute(name = "FailOver", required = true) protected String failOver; @XmlAttribute(name = "Probing", required = true) protected String probing; @XmlAttribute(name = "MinActiveServers", required = true) protected String minActiveServers; @XmlAttribute(name = "MaxActiveServers", required = true) protected String maxActiveServers; @XmlAttribute(name = "ResponseMethod") protected String responseMethod; @XmlAttribute(name = "MaxResponse") protected String maxResponse; @XmlAttribute(name = "description", required = true) protected String description; @XmlAttribute(name = "configured", required = true) protected String configured; /** * Gets the value of the poolName property. * * @return * possible object is * {@link String } * */ public String getPoolName() { return poolName; } /** * Sets the value of the poolName property. * * @param value * allowed object is * {@link String } * */ public void setPoolName(String value) { this.poolName = value; } /** * Gets the value of the lbPoolId property. 
* * @return * possible object is * {@link String } * */ public String getLBPoolId() { return lbPoolId; } /** * Sets the value of the lbPoolId property. * * @param value * allowed object is * {@link String } * */ public void setLBPoolId(String value) { this.lbPoolId = value; } /** * Gets the value of the poolId property. * * @return * possible object is * {@link String } * */ public String getPoolId() { return poolId; } /** * Sets the value of the poolId property. * * @param value * allowed object is * {@link String } * */ public void setPoolId(String value) { this.poolId = value; } /** * Gets the value of the poolType property. * * @return * possible object is * {@link String } * */ public String getPoolType() { return poolType; } /** * Sets the value of the poolType property. * * @param value * allowed object is * {@link String } * */ public void setPoolType(String value) { this.poolType = value; } /** * Gets the value of the poolRecordType property. * * @return * possible object is * {@link String } * */ public String getPoolRecordType() { return poolRecordType; } /** * Sets the value of the poolRecordType property. * * @param value * allowed object is * {@link String } * */ public void setPoolRecordType(String value) { this.poolRecordType = value; } /** * Gets the value of the bleid property. * * @return * possible object is * {@link String } * */ public String getBleid() { return bleid; } /** * Sets the value of the bleid property. * * @param value * allowed object is * {@link String } * */ public void setBleid(String value) { this.bleid = value; } /** * Gets the value of the poolStatus property. * * @return * possible object is * {@link String } * */ public String getPoolStatus() { return poolStatus; } /** * Sets the value of the poolStatus property. * * @param value * allowed object is * {@link String } * */ public void setPoolStatus(String value) { this.poolStatus = value; } /** * Gets the value of the poolDName property. 
* * @return * possible object is * {@link String } * */ public String getPoolDName() { return poolDName; } /** * Sets the value of the poolDName property. * * @param value * allowed object is * {@link String } * */ public void setPoolDName(String value) { this.poolDName = value; } /** * Gets the value of the failOver property. * * @return * possible object is * {@link String } * */ public String getFailOver() { return failOver; } /** * Sets the value of the failOver property. * * @param value * allowed object is * {@link String } * */ public void setFailOver(String value) { this.failOver = value; } /** * Gets the value of the probing property. * * @return * possible object is * {@link String } * */ public String getProbing() { return probing; } /** * Sets the value of the probing property. * * @param value * allowed object is * {@link String } * */ public void setProbing(String value) { this.probing = value; } /** * Gets the value of the minActiveServers property. * * @return * possible object is * {@link String } * */ public String getMinActiveServers() { return minActiveServers; } /** * Sets the value of the minActiveServers property. * * @param value * allowed object is * {@link String } * */ public void setMinActiveServers(String value) { this.minActiveServers = value; } /** * Gets the value of the maxActiveServers property. * * @return * possible object is * {@link String } * */ public String getMaxActiveServers() { return maxActiveServers; } /** * Sets the value of the maxActiveServers property. * * @param value * allowed object is * {@link String } * */ public void setMaxActiveServers(String value) { this.maxActiveServers = value; } /** * Gets the value of the responseMethod property. * * @return * possible object is * {@link String } * */ public String getResponseMethod() { return responseMethod; } /** * Sets the value of the responseMethod property. 
* * @param value * allowed object is * {@link String } * */ public void setResponseMethod(String value) { this.responseMethod = value; } /** * Gets the value of the maxResponse property. * * @return * possible object is * {@link String } * */ public String getMaxResponse() { return maxResponse; } /** * Sets the value of the maxResponse property. * * @param value * allowed object is * {@link String } * */ public void setMaxResponse(String value) { this.maxResponse = value; } /** * Gets the value of the description property. * * @return * possible object is * {@link String } * */ public String getDescription() { return description; } /** * Sets the value of the description property. * * @param value * allowed object is * {@link String } * */ public void setDescription(String value) { this.description = value; } /** * Gets the value of the configured property. * * @return * possible object is * {@link String } * */ public String getConfigured() { return configured; } /** * Sets the value of the configured property. * * @param value * allowed object is * {@link String } * */ public void setConfigured(String value) { this.configured = value; } }
/* * Copyright 2017, Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.monitoring.v3; import static com.google.cloud.monitoring.v3.PagedResponseWrappers.ListMetricDescriptorsPagedResponse; import static com.google.cloud.monitoring.v3.PagedResponseWrappers.ListMonitoredResourceDescriptorsPagedResponse; import static com.google.cloud.monitoring.v3.PagedResponseWrappers.ListTimeSeriesPagedResponse; import com.google.api.MetricDescriptor; import com.google.api.MonitoredResourceDescriptor; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.gax.core.CredentialsProvider; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.PropertiesProvider; import com.google.api.gax.grpc.CallContext; import com.google.api.gax.grpc.ChannelProvider; import com.google.api.gax.grpc.ClientSettings; import com.google.api.gax.grpc.ExecutorProvider; import com.google.api.gax.grpc.InstantiatingChannelProvider; import com.google.api.gax.grpc.InstantiatingExecutorProvider; import com.google.api.gax.grpc.PageContext; import com.google.api.gax.grpc.PagedCallSettings; import com.google.api.gax.grpc.PagedListDescriptor; import com.google.api.gax.grpc.PagedListResponseFactory; import com.google.api.gax.grpc.SimpleCallSettings; import com.google.api.gax.grpc.UnaryCallSettings; import com.google.api.gax.grpc.UnaryCallable; import com.google.api.gax.retrying.RetrySettings; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.monitoring.v3.CreateMetricDescriptorRequest; import com.google.monitoring.v3.CreateTimeSeriesRequest; import com.google.monitoring.v3.DeleteMetricDescriptorRequest; import com.google.monitoring.v3.GetMetricDescriptorRequest; import com.google.monitoring.v3.GetMonitoredResourceDescriptorRequest; import com.google.monitoring.v3.ListMetricDescriptorsRequest; import com.google.monitoring.v3.ListMetricDescriptorsResponse; import com.google.monitoring.v3.ListMonitoredResourceDescriptorsRequest; import com.google.monitoring.v3.ListMonitoredResourceDescriptorsResponse; import com.google.monitoring.v3.ListTimeSeriesRequest; import com.google.monitoring.v3.ListTimeSeriesResponse; import com.google.monitoring.v3.TimeSeries; import com.google.protobuf.Empty; import io.grpc.Status; import java.io.IOException; import java.util.List; import javax.annotation.Generated; import org.threeten.bp.Duration; // AUTO-GENERATED DOCUMENTATION AND CLASS /** * Settings class to configure an instance of {@link MetricServiceClient}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (monitoring.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* For example, to set the total timeout of getMonitoredResourceDescriptor to 30 seconds:
*
* <pre>
* <code>
* MetricServiceSettings.Builder metricServiceSettingsBuilder =
*     MetricServiceSettings.defaultBuilder();
* metricServiceSettingsBuilder.getMonitoredResourceDescriptorSettings().getRetrySettingsBuilder()
*     .setTotalTimeout(Duration.ofSeconds(30));
* MetricServiceSettings metricServiceSettings = metricServiceSettingsBuilder.build();
* </code>
* </pre>
*/
@Generated("by GAPIC v0.0.5")
@BetaApi
public class MetricServiceSettings extends ClientSettings {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/monitoring")
          .add("https://www.googleapis.com/auth/monitoring.read")
          .add("https://www.googleapis.com/auth/monitoring.write")
          .build();

  // Name/version pair sent in the generator header (see defaultChannelProviderBuilder()).
  private static final String DEFAULT_GAPIC_NAME = "gapic";
  private static final String DEFAULT_GAPIC_VERSION = "";

  // Classpath location and key of the artifact version read lazily by getGapicVersion().
  private static final String PROPERTIES_FILE = "/com/google/cloud/monitoring/project.properties";
  private static final String META_VERSION_KEY = "artifact.version";

  // Cached lazily by getGapicVersion(); unsynchronized -- worst case the property
  // file is read more than once, which is harmless.
  private static String gapicVersion;

  // ---- gRPC method descriptors, one per MetricService RPC ----

  private static final io.grpc.MethodDescriptor<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse>
      METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/ListMonitoredResourceDescriptors",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  ListMonitoredResourceDescriptorsRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(
                  ListMonitoredResourceDescriptorsResponse.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<
          GetMonitoredResourceDescriptorRequest, MonitoredResourceDescriptor>
      METHOD_GET_MONITORED_RESOURCE_DESCRIPTOR =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/GetMonitoredResourceDescriptor",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  GetMonitoredResourceDescriptorRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(
                  MonitoredResourceDescriptor.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<
          ListMetricDescriptorsRequest, ListMetricDescriptorsResponse>
      METHOD_LIST_METRIC_DESCRIPTORS =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/ListMetricDescriptors",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  ListMetricDescriptorsRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(
                  ListMetricDescriptorsResponse.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<GetMetricDescriptorRequest, MetricDescriptor>
      METHOD_GET_METRIC_DESCRIPTOR =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/GetMetricDescriptor",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  GetMetricDescriptorRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(MetricDescriptor.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<CreateMetricDescriptorRequest, MetricDescriptor>
      METHOD_CREATE_METRIC_DESCRIPTOR =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/CreateMetricDescriptor",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  CreateMetricDescriptorRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(MetricDescriptor.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<DeleteMetricDescriptorRequest, Empty>
      METHOD_DELETE_METRIC_DESCRIPTOR =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/DeleteMetricDescriptor",
              io.grpc.protobuf.ProtoUtils.marshaller(
                  DeleteMetricDescriptorRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(Empty.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<ListTimeSeriesRequest, ListTimeSeriesResponse>
      METHOD_LIST_TIME_SERIES =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/ListTimeSeries",
              io.grpc.protobuf.ProtoUtils.marshaller(ListTimeSeriesRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(ListTimeSeriesResponse.getDefaultInstance()));
  private static final io.grpc.MethodDescriptor<CreateTimeSeriesRequest, Empty>
      METHOD_CREATE_TIME_SERIES =
          io.grpc.MethodDescriptor.create(
              io.grpc.MethodDescriptor.MethodType.UNARY,
              "google.monitoring.v3.MetricService/CreateTimeSeries",
              io.grpc.protobuf.ProtoUtils.marshaller(CreateTimeSeriesRequest.getDefaultInstance()),
              io.grpc.protobuf.ProtoUtils.marshaller(Empty.getDefaultInstance()));

  // ---- per-RPC call settings, built once from the Builder ----

  private final PagedCallSettings<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      listMonitoredResourceDescriptorsSettings;
  private final SimpleCallSettings<
          GetMonitoredResourceDescriptorRequest, MonitoredResourceDescriptor>
      getMonitoredResourceDescriptorSettings;
  private final PagedCallSettings<
          ListMetricDescriptorsRequest, ListMetricDescriptorsResponse,
          ListMetricDescriptorsPagedResponse>
      listMetricDescriptorsSettings;
  private final SimpleCallSettings<GetMetricDescriptorRequest, MetricDescriptor>
      getMetricDescriptorSettings;
  private final SimpleCallSettings<CreateMetricDescriptorRequest, MetricDescriptor>
      createMetricDescriptorSettings;
  private final SimpleCallSettings<DeleteMetricDescriptorRequest, Empty>
      deleteMetricDescriptorSettings;
  private final PagedCallSettings<
          ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse>
      listTimeSeriesSettings;
  private final SimpleCallSettings<CreateTimeSeriesRequest, Empty> createTimeSeriesSettings;

  /** Returns the object with the settings used for calls to listMonitoredResourceDescriptors. */
  public PagedCallSettings<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      listMonitoredResourceDescriptorsSettings() {
    return listMonitoredResourceDescriptorsSettings;
  }

  /** Returns the object with the settings used for calls to getMonitoredResourceDescriptor. */
  public SimpleCallSettings<GetMonitoredResourceDescriptorRequest, MonitoredResourceDescriptor>
      getMonitoredResourceDescriptorSettings() {
    return getMonitoredResourceDescriptorSettings;
  }

  /** Returns the object with the settings used for calls to listMetricDescriptors. */
  public PagedCallSettings<
          ListMetricDescriptorsRequest, ListMetricDescriptorsResponse,
          ListMetricDescriptorsPagedResponse>
      listMetricDescriptorsSettings() {
    return listMetricDescriptorsSettings;
  }

  /** Returns the object with the settings used for calls to getMetricDescriptor. */
  public SimpleCallSettings<GetMetricDescriptorRequest, MetricDescriptor>
      getMetricDescriptorSettings() {
    return getMetricDescriptorSettings;
  }

  /** Returns the object with the settings used for calls to createMetricDescriptor. */
  public SimpleCallSettings<CreateMetricDescriptorRequest, MetricDescriptor>
      createMetricDescriptorSettings() {
    return createMetricDescriptorSettings;
  }

  /** Returns the object with the settings used for calls to deleteMetricDescriptor. */
  public SimpleCallSettings<DeleteMetricDescriptorRequest, Empty>
      deleteMetricDescriptorSettings() {
    return deleteMetricDescriptorSettings;
  }

  /** Returns the object with the settings used for calls to listTimeSeries. */
  public PagedCallSettings<
          ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse>
      listTimeSeriesSettings() {
    return listTimeSeriesSettings;
  }

  /** Returns the object with the settings used for calls to createTimeSeries. */
  public SimpleCallSettings<CreateTimeSeriesRequest, Empty> createTimeSeriesSettings() {
    return createTimeSeriesSettings;
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "monitoring.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingChannelProvider.Builder defaultChannelProviderBuilder() {
    return InstantiatingChannelProvider.newBuilder()
        .setEndpoint(getDefaultEndpoint())
        .setGeneratorHeader(DEFAULT_GAPIC_NAME, getGapicVersion());
  }

  // Lazily resolves the generated-client version from the artifact properties file,
  // falling back to the empty default when the property is absent.
  private static String getGapicVersion() {
    if (gapicVersion == null) {
      gapicVersion =
          PropertiesProvider.loadProperty(
              MetricServiceSettings.class, PROPERTIES_FILE, META_VERSION_KEY);
      gapicVersion = gapicVersion == null ? DEFAULT_GAPIC_VERSION : gapicVersion;
    }
    return gapicVersion;
  }

  /** Returns a builder for this class with recommended defaults. */
  public static Builder defaultBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return new Builder();
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshots every per-RPC settings builder into its immutable form.
  private MetricServiceSettings(Builder settingsBuilder) throws IOException {
    super(
        settingsBuilder.getExecutorProvider(),
        settingsBuilder.getChannelProvider(),
        settingsBuilder.getCredentialsProvider());

    listMonitoredResourceDescriptorsSettings =
        settingsBuilder.listMonitoredResourceDescriptorsSettings().build();
    getMonitoredResourceDescriptorSettings =
        settingsBuilder.getMonitoredResourceDescriptorSettings().build();
    listMetricDescriptorsSettings = settingsBuilder.listMetricDescriptorsSettings().build();
    getMetricDescriptorSettings = settingsBuilder.getMetricDescriptorSettings().build();
    createMetricDescriptorSettings = settingsBuilder.createMetricDescriptorSettings().build();
    deleteMetricDescriptorSettings = settingsBuilder.deleteMetricDescriptorSettings().build();
    listTimeSeriesSettings = settingsBuilder.listTimeSeriesSettings().build();
    createTimeSeriesSettings = settingsBuilder.createTimeSeriesSettings().build();
  }

  // ---- page-stream descriptors: how to inject/extract paging fields per RPC ----

  private static final PagedListDescriptor<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          MonitoredResourceDescriptor>
      LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
              MonitoredResourceDescriptor>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListMonitoredResourceDescriptorsRequest injectToken(
                ListMonitoredResourceDescriptorsRequest payload, String token) {
              return ListMonitoredResourceDescriptorsRequest.newBuilder(payload)
                  .setPageToken(token)
                  .build();
            }

            @Override
            public ListMonitoredResourceDescriptorsRequest injectPageSize(
                ListMonitoredResourceDescriptorsRequest payload, int pageSize) {
              return ListMonitoredResourceDescriptorsRequest.newBuilder(payload)
                  .setPageSize(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListMonitoredResourceDescriptorsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListMonitoredResourceDescriptorsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<MonitoredResourceDescriptor> extractResources(
                ListMonitoredResourceDescriptorsResponse payload) {
              return payload.getResourceDescriptorsList();
            }
          };

  private static final PagedListDescriptor<
          ListMetricDescriptorsRequest, ListMetricDescriptorsResponse, MetricDescriptor>
      LIST_METRIC_DESCRIPTORS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListMetricDescriptorsRequest, ListMetricDescriptorsResponse, MetricDescriptor>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListMetricDescriptorsRequest injectToken(
                ListMetricDescriptorsRequest payload, String token) {
              return ListMetricDescriptorsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListMetricDescriptorsRequest injectPageSize(
                ListMetricDescriptorsRequest payload, int pageSize) {
              return ListMetricDescriptorsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListMetricDescriptorsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListMetricDescriptorsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<MetricDescriptor> extractResources(
                ListMetricDescriptorsResponse payload) {
              return payload.getMetricDescriptorsList();
            }
          };

  private static final PagedListDescriptor<
          ListTimeSeriesRequest, ListTimeSeriesResponse, TimeSeries>
      LIST_TIME_SERIES_PAGE_STR_DESC =
          new PagedListDescriptor<ListTimeSeriesRequest, ListTimeSeriesResponse, TimeSeries>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListTimeSeriesRequest injectToken(ListTimeSeriesRequest payload, String token) {
              return ListTimeSeriesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListTimeSeriesRequest injectPageSize(
                ListTimeSeriesRequest payload, int pageSize) {
              return ListTimeSeriesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListTimeSeriesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListTimeSeriesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<TimeSeries> extractResources(ListTimeSeriesResponse payload) {
              return payload.getTimeSeriesList();
            }
          };

  // ---- factories that wrap a unary callable into a lazily-paging response ----

  private static final PagedListResponseFactory<
          ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
          ListMonitoredResourceDescriptorsPagedResponse>
      LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
              ListMonitoredResourceDescriptorsPagedResponse>() {
            @Override
            public ApiFuture<ListMonitoredResourceDescriptorsPagedResponse> getFuturePagedResponse(
                UnaryCallable<
                        ListMonitoredResourceDescriptorsRequest,
                        ListMonitoredResourceDescriptorsResponse>
                    callable,
                ListMonitoredResourceDescriptorsRequest request,
                CallContext context,
                ApiFuture<ListMonitoredResourceDescriptorsResponse> futureResponse) {
              PageContext<
                      ListMonitoredResourceDescriptorsRequest,
                      ListMonitoredResourceDescriptorsResponse, MonitoredResourceDescriptor>
                  pageContext =
                      PageContext.create(
                          callable,
                          LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_DESC,
                          request,
                          context);
              return ListMonitoredResourceDescriptorsPagedResponse.createAsync(
                  pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListMetricDescriptorsRequest, ListMetricDescriptorsResponse,
          ListMetricDescriptorsPagedResponse>
      LIST_METRIC_DESCRIPTORS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListMetricDescriptorsRequest, ListMetricDescriptorsResponse,
              ListMetricDescriptorsPagedResponse>() {
            @Override
            public ApiFuture<ListMetricDescriptorsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListMetricDescriptorsRequest, ListMetricDescriptorsResponse> callable,
                ListMetricDescriptorsRequest request,
                CallContext context,
                ApiFuture<ListMetricDescriptorsResponse> futureResponse) {
              PageContext<
                      ListMetricDescriptorsRequest, ListMetricDescriptorsResponse, MetricDescriptor>
                  pageContext =
                      PageContext.create(
                          callable, LIST_METRIC_DESCRIPTORS_PAGE_STR_DESC, request, context);
              return ListMetricDescriptorsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse>
      LIST_TIME_SERIES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse>() {
            @Override
            public ApiFuture<ListTimeSeriesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListTimeSeriesRequest, ListTimeSeriesResponse> callable,
                ListTimeSeriesRequest request,
                CallContext context,
                ApiFuture<ListTimeSeriesResponse> futureResponse) {
              PageContext<ListTimeSeriesRequest, ListTimeSeriesResponse, TimeSeries> pageContext =
                  PageContext.create(callable, LIST_TIME_SERIES_PAGE_STR_DESC, request, context);
              return ListTimeSeriesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Builder for MetricServiceSettings. */
  public static class Builder extends ClientSettings.Builder {
    // All unary settings builders, used to apply bulk updates in one pass.
    private final ImmutableList<UnaryCallSettings.Builder> unaryMethodSettingsBuilders;

    private final PagedCallSettings.Builder<
            ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse,
            ListMonitoredResourceDescriptorsPagedResponse>
        listMonitoredResourceDescriptorsSettings;
    private final SimpleCallSettings.Builder<
            GetMonitoredResourceDescriptorRequest, MonitoredResourceDescriptor>
        getMonitoredResourceDescriptorSettings;
    private final PagedCallSettings.Builder<
            ListMetricDescriptorsRequest, ListMetricDescriptorsResponse,
            ListMetricDescriptorsPagedResponse>
        listMetricDescriptorsSettings;
    private final SimpleCallSettings.Builder<GetMetricDescriptorRequest, MetricDescriptor>
        getMetricDescriptorSettings;
    private final SimpleCallSettings.Builder<CreateMetricDescriptorRequest, MetricDescriptor>
        createMetricDescriptorSettings;
    private final SimpleCallSettings.Builder<DeleteMetricDescriptorRequest, Empty>
        deleteMetricDescriptorSettings;
    private final PagedCallSettings.Builder<
            ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse>
        listTimeSeriesSettings;
    private final SimpleCallSettings.Builder<CreateTimeSeriesRequest, Empty>
        createTimeSeriesSettings;

    // Which gRPC status codes are retried for "idempotent" vs "non_idempotent" methods.
    private static final ImmutableMap<String, ImmutableSet<Status.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<Status.Code>> definitions = ImmutableMap.builder();
      definitions.put(
          "idempotent",
          Sets.immutableEnumSet(
              Lists.<Status.Code>newArrayList(
                  Status.Code.DEADLINE_EXCEEDED, Status.Code.UNAVAILABLE)));
      definitions.put("non_idempotent", Sets.immutableEnumSet(Lists.<Status.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Shared retry/backoff parameter presets, keyed by name ("default").
    private static final ImmutableMap<String, RetrySettings.Builder> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings.Builder> definitions = ImmutableMap.builder();
      RetrySettings.Builder settingsBuilder = null;
      settingsBuilder =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(60000L))
              .setInitialRpcTimeout(Duration.ofMillis(20000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(20000L))
              .setTotalTimeout(Duration.ofMillis(600000L));
      definitions.put("default", settingsBuilder);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    // Creates a builder with fresh (un-retried) per-method settings.
    private Builder() {
      super(defaultChannelProviderBuilder().build());
      setCredentialsProvider(defaultCredentialsProviderBuilder().build());

      listMonitoredResourceDescriptorsSettings =
          PagedCallSettings.newBuilder(
              METHOD_LIST_MONITORED_RESOURCE_DESCRIPTORS,
              LIST_MONITORED_RESOURCE_DESCRIPTORS_PAGE_STR_FACT);
      getMonitoredResourceDescriptorSettings =
          SimpleCallSettings.newBuilder(METHOD_GET_MONITORED_RESOURCE_DESCRIPTOR);
      listMetricDescriptorsSettings =
          PagedCallSettings.newBuilder(
              METHOD_LIST_METRIC_DESCRIPTORS, LIST_METRIC_DESCRIPTORS_PAGE_STR_FACT);
      getMetricDescriptorSettings = SimpleCallSettings.newBuilder(METHOD_GET_METRIC_DESCRIPTOR);
      createMetricDescriptorSettings =
          SimpleCallSettings.newBuilder(METHOD_CREATE_METRIC_DESCRIPTOR);
      deleteMetricDescriptorSettings =
          SimpleCallSettings.newBuilder(METHOD_DELETE_METRIC_DESCRIPTOR);
      listTimeSeriesSettings =
          PagedCallSettings.newBuilder(METHOD_LIST_TIME_SERIES, LIST_TIME_SERIES_PAGE_STR_FACT);
      createTimeSeriesSettings = SimpleCallSettings.newBuilder(METHOD_CREATE_TIME_SERIES);

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder>of(
              listMonitoredResourceDescriptorsSettings,
              getMonitoredResourceDescriptorSettings,
              listMetricDescriptorsSettings,
              getMetricDescriptorSettings,
              createMetricDescriptorSettings,
              deleteMetricDescriptorSettings,
              listTimeSeriesSettings,
              createTimeSeriesSettings);
    }

    // Applies the recommended retry codes and backoff parameters to every method.
    private static Builder createDefault() {
      Builder builder = new Builder();
      builder
          .listMonitoredResourceDescriptorsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .getMonitoredResourceDescriptorSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .listMetricDescriptorsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .getMetricDescriptorSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .createMetricDescriptorSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .deleteMetricDescriptorSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .listTimeSeriesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      builder
          .createTimeSeriesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
          .setRetrySettingsBuilder(RETRY_PARAM_DEFINITIONS.get("default"));
      return builder;
    }

    // Copy-constructor used by toBuilder(): re-opens each immutable settings object.
    private Builder(MetricServiceSettings settings) {
      super(settings);

      listMonitoredResourceDescriptorsSettings =
          settings.listMonitoredResourceDescriptorsSettings.toBuilder();
      getMonitoredResourceDescriptorSettings =
          settings.getMonitoredResourceDescriptorSettings.toBuilder();
      listMetricDescriptorsSettings = settings.listMetricDescriptorsSettings.toBuilder();
      getMetricDescriptorSettings = settings.getMetricDescriptorSettings.toBuilder();
      createMetricDescriptorSettings = settings.createMetricDescriptorSettings.toBuilder();
      deleteMetricDescriptorSettings = settings.deleteMetricDescriptorSettings.toBuilder();
      listTimeSeriesSettings = settings.listTimeSeriesSettings.toBuilder();
      // NOTE(review): this constructor continues past the end of the visible chunk;
      // the trailing token below is intentionally left incomplete to match it.
      createTimeSeriesSettings
= settings.createTimeSeriesSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder>of( listMonitoredResourceDescriptorsSettings, getMonitoredResourceDescriptorSettings, listMetricDescriptorsSettings, getMetricDescriptorSettings, createMetricDescriptorSettings, deleteMetricDescriptorSettings, listTimeSeriesSettings, createTimeSeriesSettings); } @Override public Builder setExecutorProvider(ExecutorProvider executorProvider) { super.setExecutorProvider(executorProvider); return this; } @Override public Builder setChannelProvider(ChannelProvider channelProvider) { super.setChannelProvider(channelProvider); return this; } @Override public Builder setCredentialsProvider(CredentialsProvider credentialsProvider) { super.setCredentialsProvider(credentialsProvider); return this; } /** * Applies the given settings to all of the unary API methods in this service. Only values that * are non-null will be applied, so this method is not capable of un-setting any values. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods(UnaryCallSettings.Builder unaryCallSettings) throws Exception { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, unaryCallSettings); return this; } /** Returns the builder for the settings used for calls to listMonitoredResourceDescriptors. */ public PagedCallSettings.Builder< ListMonitoredResourceDescriptorsRequest, ListMonitoredResourceDescriptorsResponse, ListMonitoredResourceDescriptorsPagedResponse> listMonitoredResourceDescriptorsSettings() { return listMonitoredResourceDescriptorsSettings; } /** Returns the builder for the settings used for calls to getMonitoredResourceDescriptor. 
*/ public SimpleCallSettings.Builder< GetMonitoredResourceDescriptorRequest, MonitoredResourceDescriptor> getMonitoredResourceDescriptorSettings() { return getMonitoredResourceDescriptorSettings; } /** Returns the builder for the settings used for calls to listMetricDescriptors. */ public PagedCallSettings.Builder< ListMetricDescriptorsRequest, ListMetricDescriptorsResponse, ListMetricDescriptorsPagedResponse> listMetricDescriptorsSettings() { return listMetricDescriptorsSettings; } /** Returns the builder for the settings used for calls to getMetricDescriptor. */ public SimpleCallSettings.Builder<GetMetricDescriptorRequest, MetricDescriptor> getMetricDescriptorSettings() { return getMetricDescriptorSettings; } /** Returns the builder for the settings used for calls to createMetricDescriptor. */ public SimpleCallSettings.Builder<CreateMetricDescriptorRequest, MetricDescriptor> createMetricDescriptorSettings() { return createMetricDescriptorSettings; } /** Returns the builder for the settings used for calls to deleteMetricDescriptor. */ public SimpleCallSettings.Builder<DeleteMetricDescriptorRequest, Empty> deleteMetricDescriptorSettings() { return deleteMetricDescriptorSettings; } /** Returns the builder for the settings used for calls to listTimeSeries. */ public PagedCallSettings.Builder< ListTimeSeriesRequest, ListTimeSeriesResponse, ListTimeSeriesPagedResponse> listTimeSeriesSettings() { return listTimeSeriesSettings; } /** Returns the builder for the settings used for calls to createTimeSeries. */ public SimpleCallSettings.Builder<CreateTimeSeriesRequest, Empty> createTimeSeriesSettings() { return createTimeSeriesSettings; } @Override public MetricServiceSettings build() throws IOException { return new MetricServiceSettings(this); } } }
/**
 * Copyright (C) 2012-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package ninja.validation;

import java.util.Set;
import java.util.regex.Pattern;

import javax.validation.ValidatorFactory;

/**
 * Built in validators.
 *
 * @author James Roper
 */
public class Validators {

    /**
     * Validates a bean via JSR-303 annotations and copies every constraint
     * violation onto the given {@link Validation} as a bean violation.
     */
    public static class JSRValidator implements Validator<Object> {

        // Building a ValidatorFactory is expensive; the factory (and the Validator
        // instances it returns) are thread-safe per the Bean Validation spec, so
        // build it once instead of on every validate() call.
        private static final ValidatorFactory VALIDATOR_FACTORY =
                javax.validation.Validation.buildDefaultValidatorFactory();

        @Override
        public void validate(Object value, String field, Validation validation) {
            if (value == null) {
                // nothing to validate
                return;
            }
            javax.validation.Validator validator = VALIDATOR_FACTORY.getValidator();
            Set<javax.validation.ConstraintViolation<Object>> violations =
                    validator.validate(value);
            for (javax.validation.ConstraintViolation<Object> violation : violations) {
                ConstraintViolation constraintViolation =
                        ConstraintViolation.create(
                                violation.getMessage(), violation.getInvalidValue());
                validation.addBeanViolation(new FieldViolation(
                        violation.getPropertyPath().toString(), constraintViolation));
            }
        }

        @Override
        public Class<Object> getValidatedType() {
            return Object.class;
        }
    }

    /** Flags a field violation when the value is {@code null}. */
    public static class RequiredValidator implements Validator<Object> {
        private final Required required;

        public RequiredValidator(Required required) {
            this.required = required;
        }

        @Override
        public void validate(Object value, String field, Validation validation) {
            if (value == null) {
                validation.addFieldViolation(
                        field,
                        ConstraintViolation.createForFieldWithDefault(
                                required.key(),
                                fieldKey(field, required.fieldKey()),
                                required.message()));
            }
        }

        @Override
        public Class<Object> getValidatedType() {
            return Object.class;
        }
    }

    /**
     * Validates the length of a string. A bound of -1 means "no bound";
     * {@code null} values are accepted.
     */
    public static class LengthValidator implements Validator<String> {
        private final Length length;

        public LengthValidator(Length length) {
            this.length = length;
        }

        @Override
        public void validate(String value, String field, Validation validation) {
            if (value != null) {
                if (length.max() != -1 && value.length() > length.max()) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(length.maxKey(),
                                    fieldKey(field, length.fieldKey()),
                                    length.maxMessage(), length.max(), value));
                } else if (length.min() != -1 && value.length() < length.min()) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(length.minKey(),
                                    fieldKey(field, length.fieldKey()),
                                    length.minMessage(), length.min(), value));
                }
            }
        }

        @Override
        public Class<String> getValidatedType() {
            return String.class;
        }
    }

    /**
     * Validates that a string parses as an integer. Parses with
     * {@link Long#parseLong} so the full 64-bit range is accepted.
     */
    public static class IntegerValidator implements Validator<String> {
        private final IsInteger isInteger;

        public IntegerValidator(IsInteger integer) {
            isInteger = integer;
        }

        @Override
        public void validate(String value, String field, Validation validation) {
            if (value != null) {
                try {
                    Long.parseLong(value);
                } catch (NumberFormatException e) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(isInteger.key(),
                                    fieldKey(field, isInteger.fieldKey()),
                                    isInteger.message(), value));
                }
            }
        }

        @Override
        public Class<String> getValidatedType() {
            return String.class;
        }
    }

    /**
     * Validates that a string parses as a floating point number (via
     * {@link Double#parseDouble}).
     */
    public static class FloatValidator implements Validator<String> {
        private final IsFloat isFloat;

        public FloatValidator(IsFloat aFloat) {
            isFloat = aFloat;
        }

        @Override
        public void validate(String value, String field, Validation validation) {
            if (value != null) {
                try {
                    Double.parseDouble(value);
                } catch (NumberFormatException e) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(isFloat.key(),
                                    fieldKey(field, isFloat.fieldKey()),
                                    isFloat.message(), value));
                }
            }
        }

        @Override
        public Class<String> getValidatedType() {
            return String.class;
        }
    }

    /** Validates a string against a regular expression (full match). */
    public static class MatchesValidator implements Validator<String> {
        private final Matches matches;
        // Compiled once per validator instance instead of per validate() call.
        private final Pattern pattern;

        public MatchesValidator(Matches matches) {
            this.matches = matches;
            pattern = Pattern.compile(matches.regexp());
        }

        @Override
        public void validate(String value, String field, Validation validation) {
            if (value != null) {
                if (!pattern.matcher(value).matches()) {
                    validation.addFieldViolation(
                            field,
                            ConstraintViolation.createForFieldWithDefault(
                                    matches.key(),
                                    fieldKey(field, matches.fieldKey()),
                                    matches.message(), matches.regexp(), value));
                }
            }
        }

        @Override
        public Class<String> getValidatedType() {
            return String.class;
        }
    }

    /** Validates that a number lies within the configured bounds. */
    public static class NumberValidator implements Validator<Number> {
        private final NumberValue number;

        public NumberValidator(NumberValue number) {
            this.number = number;
        }

        @Override
        public void validate(Number value, String field, Validation validation) {
            if (value != null) {
                // NOTE(review): the max bound uses Double.MAX_VALUE as its "unset"
                // sentinel while the min bound uses -1, so a deliberately configured
                // min of -1 is ignored — confirm against NumberValue's defaults.
                if (number.max() != Double.MAX_VALUE
                        && value.doubleValue() > number.max()) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(number.maxKey(),
                                    fieldKey(field, number.fieldKey()),
                                    number.maxMessage(), number.max(), value));
                } else if (number.min() != -1 && value.doubleValue() < number.min()) {
                    validation.addFieldViolation(field, ConstraintViolation
                            .createForFieldWithDefault(number.minKey(),
                                    fieldKey(field, number.fieldKey()),
                                    number.minMessage(), number.min(), value));
                }
            }
        }

        @Override
        public Class<Number> getValidatedType() {
            return Number.class;
        }
    }

    /**
     * Validates that a string names a constant of the configured enum class,
     * optionally ignoring case.
     */
    public static class EnumValidator implements Validator<String> {
        private final IsEnum isEnum;

        public EnumValidator(IsEnum anEnum) {
            isEnum = anEnum;
        }

        @Override
        public void validate(String value, String field, Validation validation) {
            if (value != null) {
                Enum<?>[] values = isEnum.enumClass().getEnumConstants();
                for (Enum<?> v : values) {
                    if (isEnum.caseSensitive()) {
                        if (v.name().equals(value)) {
                            return;
                        }
                    } else {
                        if (v.name().equalsIgnoreCase(value)) {
                            return;
                        }
                    }
                }
                // No constant matched: report with the offending value and enum name.
                validation.addFieldViolation(field,
                        ConstraintViolation.createForFieldWithDefault(
                                IsEnum.KEY, field, IsEnum.MESSAGE,
                                new Object[] {value, isEnum.enumClass().getName()}));
            }
        }

        @Override
        public Class<String> getValidatedType() {
            return String.class;
        }
    }

    /**
     * Returns the configured message key for the field if one was given,
     * otherwise falls back to the field name itself.
     */
    private static String fieldKey(String fieldName, String configuredFieldKey) {
        if (!configuredFieldKey.isEmpty()) {
            return configuredFieldKey;
        } else {
            return fieldName;
        }
    }
}
/** * Copyright (c) 2013-2020 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.index.simple; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.apache.log4j.Logger; import org.locationtech.geowave.core.index.ByteArrayRange; import org.locationtech.geowave.core.index.ByteArrayUtils; import org.locationtech.geowave.core.index.Coordinate; import org.locationtech.geowave.core.index.IndexMetaData; import org.locationtech.geowave.core.index.InsertionIds; import org.locationtech.geowave.core.index.MultiDimensionalCoordinateRanges; import org.locationtech.geowave.core.index.MultiDimensionalCoordinates; import org.locationtech.geowave.core.index.NumericIndexStrategy; import org.locationtech.geowave.core.index.QueryRanges; import org.locationtech.geowave.core.index.SinglePartitionQueryRanges; import org.locationtech.geowave.core.index.StringUtils; import org.locationtech.geowave.core.index.dimension.BasicDimensionDefinition; import org.locationtech.geowave.core.index.dimension.NumericDimensionDefinition; import org.locationtech.geowave.core.index.lexicoder.NumberLexicoder; import org.locationtech.geowave.core.index.numeric.BasicNumericDataset; import org.locationtech.geowave.core.index.numeric.MultiDimensionalNumericData; import org.locationtech.geowave.core.index.numeric.NumericData; import org.locationtech.geowave.core.index.numeric.NumericValue; /** * A simple 1-dimensional NumericIndexStrategy that represents an index of signed integer values * (currently supports 16 bit, 32 bit, and 64 bit integers). 
The strategy doesn't use any binning. The ids are simply the byte
 * arrays of the value. This index strategy will not perform well for inserting ranges because
 * there will be too much replication of data.
 */
public abstract class SimpleNumericIndexStrategy<T extends Number> implements NumericIndexStrategy {
  private static final Logger LOGGER = Logger.getLogger(SimpleNumericIndexStrategy.class);

  // Converts between the numeric type T and a sort-order-preserving byte encoding.
  private final NumberLexicoder<T> lexicoder;
  // Always a single dimension for this strategy.
  private final NumericDimensionDefinition[] definitions;

  protected SimpleNumericIndexStrategy(final NumberLexicoder<T> lexicoder) {
    // Default to a single basic dimension spanning the lexicoder's full value range.
    this(
        lexicoder,
        new NumericDimensionDefinition[] {
            new BasicDimensionDefinition(
                lexicoder.getMinimumValue().doubleValue(),
                lexicoder.getMaximumValue().doubleValue())});
  }

  protected SimpleNumericIndexStrategy(
      final NumberLexicoder<T> lexicoder,
      final NumericDimensionDefinition[] definitions) {
    this.lexicoder = lexicoder;
    this.definitions = definitions;
  }

  public NumberLexicoder<T> getLexicoder() {
    return lexicoder;
  }

  /**
   * Cast a double into the type T
   *
   * @param value a double value
   * @return the value represented as a T
   */
  protected abstract T cast(double value);

  /**
   * Checks whether the underlying type T is integral; when true, query maxima are rounded up
   * with Math.ceil before encoding (see getQueryRanges).
   *
   * @return true when T is an integral type
   */
  protected abstract boolean isInteger();

  /**
   * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes
   * is the same as the sort order of values, so an indexedRange can be represented by a single
   * contiguous ByteArrayRange. {@inheritDoc}
   */
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final IndexMetaData... hints) {
    // -1 delegates with "no limit" on range decomposition; irrelevant here since a
    // single contiguous range is always produced.
    return getQueryRanges(indexedRange, -1, hints);
  }

  /**
   * Always returns a single range since this is a 1-dimensional index. The sort-order of the bytes
   * is the same as the sort order of values, so an indexedRange can be represented by a single
   * contiguous ByteArrayRange. {@inheritDoc}
   */
  @Override
  public QueryRanges getQueryRanges(
      final MultiDimensionalNumericData indexedRange,
      final int maxEstimatedRangeDecomposition,
      final IndexMetaData... hints) {
    final T min = cast(indexedRange.getDataPerDimension()[0].getMin());
    byte[] start = lexicoder.toByteArray(min);
    // For integral types the upper bound is rounded up so fractional query maxima
    // still cover the next whole value.
    final T max =
        cast(
            isInteger() ? Math.ceil(indexedRange.getDataPerDimension()[0].getMax())
                : indexedRange.getMaxValuesPerDimension()[0]);
    byte[] end = lexicoder.toByteArray(max);
    // Exclusive bounds are converted to inclusive byte bounds by stepping to the
    // next/previous prefix in lexicographic order.
    if (!indexedRange.getDataPerDimension()[0].isMinInclusive()) {
      start = ByteArrayUtils.getNextPrefix(start);
    }
    if (!indexedRange.getDataPerDimension()[0].isMaxInclusive()) {
      end = ByteArrayUtils.getPreviousPrefix(end);
    }
    final ByteArrayRange range = new ByteArrayRange(start, end);
    final SinglePartitionQueryRanges partitionRange =
        new SinglePartitionQueryRanges(Collections.singletonList(range));
    return new QueryRanges(Collections.singletonList(partitionRange));
  }

  /**
   * Returns all of the insertion ids for the range. Since this index strategy doesn't use binning,
   * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range
   * using this index strategy, your data will be replicated for every integer value in the range).
   *
   * <p> {@inheritDoc}
   */
  @Override
  public InsertionIds getInsertionIds(final MultiDimensionalNumericData indexedData) {
    return getInsertionIds(indexedData, -1);
  }

  /**
   * Returns all of the insertion ids for the range. Since this index strategy doesn't use binning,
   * it will return the ByteArrayId of every value in the range (i.e. if you are storing a range
   * using this index strategy, your data will be replicated for every integer value in the range).
   *
   * <p> {@inheritDoc}
   */
  @Override
  public InsertionIds getInsertionIds(
      final MultiDimensionalNumericData indexedData,
      final int maxEstimatedDuplicateIds) {
    if (indexedData.isEmpty()) {
      LOGGER.warn("Cannot index empty fields, skipping writing row to index '" + getId() + "'");
      return new InsertionIds();
    }
    final double min = indexedData.getMinValuesPerDimension()[0];
    final double max = indexedData.getMaxValuesPerDimension()[0];
    // One id per whole-number step in [min, max]; this is the replication mentioned above.
    final List<byte[]> insertionIds = new ArrayList<>((int) (max - min) + 1);
    for (double i = min; i <= max; i++) {
      insertionIds.add(lexicoder.toByteArray(cast(i)));
    }
    return new InsertionIds(insertionIds);
  }

  @Override
  public NumericDimensionDefinition[] getOrderedDimensionDefinitions() {
    return definitions;
  }

  @Override
  public MultiDimensionalNumericData getRangeForId(
      final byte[] partitionKey,
      final byte[] sortKey) {
    // The sort key alone encodes the (single-dimension) value; partition key is unused.
    final double value = lexicoder.fromByteArray(sortKey).doubleValue();
    final NumericData[] dataPerDimension = new NumericData[] {new NumericValue(value)};
    return new BasicNumericDataset(dataPerDimension);
  }

  @Override
  public MultiDimensionalCoordinates getCoordinatesPerDimension(
      final byte[] partitionKey,
      final byte[] sortKey) {
    return new MultiDimensionalCoordinates(
        null,
        new Coordinate[] {new Coordinate(lexicoder.fromByteArray(sortKey).longValue(), null)});
  }

  // Coordinate-range decomposition is not supported by this strategy (returns null).
  @Override
  public MultiDimensionalCoordinateRanges[] getCoordinateRangesPerDimension(
      final MultiDimensionalNumericData dataRange,
      final IndexMetaData... hints) {
    return null;
  }

  @Override
  public double[] getHighestPrecisionIdRangePerDimension() {
    return new double[] {1d};
  }

  @Override
  public String getId() {
    // Id is derived from hashCode(), i.e. from the definitions and lexicoder below.
    return StringUtils.intToString(hashCode());
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = (prime * result) + Arrays.hashCode(definitions);
    result = (prime * result) + ((lexicoder == null) ? 0 : lexicoder.hashCode());
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final SimpleNumericIndexStrategy<?> other = (SimpleNumericIndexStrategy<?>) obj;
    if (!Arrays.equals(definitions, other.definitions)) {
      return false;
    }
    if (lexicoder == null) {
      if (other.lexicoder != null) {
        return false;
      }
    } else if (!lexicoder.equals(other.lexicoder)) {
      return false;
    }
    return true;
  }

  @Override
  public List<IndexMetaData> createMetaData() {
    return Collections.emptyList();
  }

  // This strategy never partitions: zero-length partition keys, null partition lookups.
  @Override
  public int getPartitionKeyLength() {
    return 0;
  }

  @Override
  public byte[][] getInsertionPartitionKeys(final MultiDimensionalNumericData insertionData) {
    return null;
  }

  @Override
  public byte[][] getQueryPartitionKeys(
      final MultiDimensionalNumericData queryData,
      final IndexMetaData... hints) {
    return null;
  }

  // No serialized state of its own; subclasses are expected to be stateless beyond the
  // lexicoder/definitions established at construction.
  @Override
  public byte[] toBinary() {
    return new byte[] {};
  }

  @Override
  public void fromBinary(final byte[] bytes) {}
}
// Copyright (c) 2003-present, Jodd Team (jodd.org). All Rights Reserved. package jodd.htmlstapler; import jodd.bean.BeanUtil; import jodd.datetime.TimeUtil; import jodd.io.StreamUtil; import jodd.lagarto.TagVisitor; import jodd.lagarto.TagWriter; import jodd.lagarto.adapter.StripHtmlTagAdapter; import jodd.lagarto.filter.SimpleLagartoServletFilter; import jodd.servlet.DispatcherUtil; import jodd.servlet.ServletUtil; import jodd.util.MimeTypes; import jodd.util.StringPool; import jodd.log.Logger; import jodd.log.LoggerFactory; import javax.servlet.FilterConfig; import javax.servlet.ServletContext; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.OutputStream; import static jodd.htmlstapler.HtmlStaplerBundlesManager.Strategy; /** * HtmlStapler filter. * Part of the parameters are here, the other part is in the * {@link #createBundleManager(javax.servlet.ServletContext, jodd.htmlstapler.HtmlStaplerBundlesManager.Strategy)} bundle manager}. 
*/ public class HtmlStaplerFilter extends SimpleLagartoServletFilter { private static final Logger log = LoggerFactory.getLogger(HtmlStaplerFilter.class); protected HtmlStaplerBundlesManager bundlesManager; protected boolean enabled = true; protected boolean stripHtml = true; protected boolean resetOnStart = true; protected boolean useGzip; protected int cacheMaxAge = TimeUtil.SECONDS_IN_DAY * 30; protected Strategy staplerStrategy = Strategy.RESOURCES_ONLY; @Override public void init(FilterConfig filterConfig) throws ServletException { super.init(filterConfig); bundlesManager = createBundleManager(filterConfig.getServletContext(), staplerStrategy); readFilterConfigParameters(filterConfig, this, "enabled", "stripHtml", "resetOnStart", "useGzip", "cacheMaxAge" ); String staplerStrategyName = filterConfig.getInitParameter("strategy"); if (staplerStrategyName != null) { if (staplerStrategyName.equalsIgnoreCase("ACTION_MANAGED")) { staplerStrategy = Strategy.ACTION_MANAGED; } } readFilterConfigParameters(filterConfig, bundlesManager, "bundleFolder", "downloadLocal", "localAddressAndPort", "localFilesEncoding", "notFoundExceptionEnabled", "sortResources", "staplerPath", "randomDigestChars" ); if (resetOnStart) { bundlesManager.reset(); } } /** * Reads filter config parameters and set into destination target. */ protected void readFilterConfigParameters(FilterConfig filterConfig, Object target, String... parameters) { for (String parameter : parameters) { String value = filterConfig.getInitParameter(parameter); if (value != null) { BeanUtil.setDeclaredProperty(target, parameter, value); } } } /** * Creates {@link HtmlStaplerBundlesManager} instance. 
*/ protected HtmlStaplerBundlesManager createBundleManager(ServletContext servletContext, Strategy strategy) { String webRoot = servletContext.getRealPath(StringPool.EMPTY); String contextPath = ServletUtil.getContextPath(servletContext); return new HtmlStaplerBundlesManager(contextPath, webRoot, strategy); } @Override protected LagartoParsingProcessor createParsingProcessor() { if (enabled == false) { return null; } return new LagartoParsingProcessor(true) { @Override protected char[] parse(TagWriter rootTagWriter, HttpServletRequest request) { TagVisitor visitor = rootTagWriter; if (stripHtml) { visitor = new StripHtmlTagAdapter(rootTagWriter) { @Override public void end() { super.end(); if (log.isDebugEnabled()) { log.debug("Stripped: " + getStrippedCharsCount() + " chars"); } } }; } String servletPath = DispatcherUtil.getServletPath(request); HtmlStaplerTagAdapter htmlStaplerTagAdapter = new HtmlStaplerTagAdapter(bundlesManager, servletPath, visitor); // todo add more adapters char[] content = invokeLagarto(htmlStaplerTagAdapter); return htmlStaplerTagAdapter.postProcess(content); } }; } @Override protected boolean processActionPath(HttpServletRequest servletRequest, HttpServletResponse servletResponse, String actionPath) throws IOException { String bundlePath = '/' + bundlesManager.getStaplerPath() + '/'; if (actionPath.startsWith(bundlePath) == false) { return false; } String bundleId = actionPath.substring(bundlePath.length()); File file = bundlesManager.lookupBundleFile(bundleId); if (log.isDebugEnabled()) { log.debug("bundle: " + bundleId); } int ndx = bundleId.lastIndexOf('.'); String extension = bundleId.substring(ndx + 1); String contentType = MimeTypes.getMimeType(extension); servletResponse.setContentType(contentType); if (useGzip && ServletUtil.isGzipSupported(servletRequest)) { file = bundlesManager.lookupGzipBundleFile(file); servletResponse.setHeader("Content-Encoding", "gzip"); } if (file.exists() == false) { throw new IOException("bundle not 
found: " + bundleId); } servletResponse.setHeader("Content-Length", String.valueOf(file.length())); servletResponse.setHeader("Last-Modified", TimeUtil.formatHttpDate(file.lastModified())); if (cacheMaxAge > 0) { servletResponse.setHeader("Cache-Control", "max-age=" + cacheMaxAge); } sendBundleFile(servletResponse, file); return true; } /** * Outputs bundle file to the response. */ protected void sendBundleFile(HttpServletResponse resp, File bundleFile) throws IOException { OutputStream out = resp.getOutputStream(); StreamUtil.copy(new FileInputStream(bundleFile), out); } }
/*
 * Copyright 2000-2013 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.highlighter.custom;

import com.intellij.ide.highlighter.custom.tokens.KeywordParser;

import java.util.Arrays;
import java.util.Set;
import java.util.TreeSet;

/**
 * Mutable description of a custom syntax: four keyword groups, comment delimiters,
 * number affixes, and brace/bracket/paren/string-escape flags. Lazily builds and
 * caches a {@link KeywordParser} which is invalidated whenever the keyword sets or
 * case sensitivity change.
 *
 * @author Yura Cangea
 * @version 1.0
 */
public class SyntaxTable implements Cloneable {
  // Keyword groups; every mutator below resets the cached parser.
  private Set<String> myKeywords1;
  private Set<String> myKeywords2;
  private Set<String> myKeywords3;
  private Set<String> myKeywords4;

  private String myLineComment;
  public boolean lineCommentOnlyAtStart;
  private String myStartComment;
  private String myEndComment;

  private String myHexPrefix;
  private String myNumPostfixChars;

  private boolean myIgnoreCase;
  private boolean myHasBraces;
  private boolean myHasBrackets;
  private boolean myHasParens;
  private boolean myHasStringEscapes;

  // Lazily built; volatile for the double-checked locking in getKeywordParser().
  private volatile KeywordParser myKeywordParser;

  // -------------------------------------------------------------------------
  // Constructor
  // -------------------------------------------------------------------------
  public SyntaxTable() {
    myKeywords1 = new TreeSet<String>();
    myKeywords2 = new TreeSet<String>();
    myKeywords3 = new TreeSet<String>();
    myKeywords4 = new TreeSet<String>();
  }

  KeywordParser getKeywordParser() {
    // Double-checked locking over the volatile field: mutators null the cache,
    // so the parser is rebuilt at most once per invalidation.
    KeywordParser parser = myKeywordParser;
    if (parser == null) {
      synchronized (this) {
        parser = myKeywordParser;
        if (parser == null) {
          myKeywordParser = parser = new KeywordParser(Arrays.asList(myKeywords1, myKeywords2, myKeywords3, myKeywords4), myIgnoreCase);
        }
      }
    }
    return parser;
  }

  @Override
  protected Object clone() throws CloneNotSupportedException {
    // Deep-copy the keyword sets and drop the cached parser so the clone
    // rebuilds its own on first use.
    SyntaxTable cl = (SyntaxTable) super.clone();
    cl.myKeywords1 = new TreeSet<String>(myKeywords1);
    cl.myKeywords2 = new TreeSet<String>(myKeywords2);
    cl.myKeywords3 = new TreeSet<String>(myKeywords3);
    cl.myKeywords4 = new TreeSet<String>(myKeywords4);
    cl.myKeywordParser = null;
    return cl;
  }

  // -------------------------------------------------------------------------
  // Public interface
  // -------------------------------------------------------------------------

  public void addKeyword1(String keyword) {
    myKeywords1.add(keyword);
    myKeywordParser = null;
  }

  public Set<String> getKeywords1() {
    return myKeywords1;
  }

  public void addKeyword2(String keyword) {
    myKeywords2.add(keyword);
    myKeywordParser = null;
  }

  public Set<String> getKeywords2() {
    return myKeywords2;
  }

  public void addKeyword3(String keyword) {
    myKeywords3.add(keyword);
    myKeywordParser = null;
  }

  public Set<String> getKeywords3() {
    return myKeywords3;
  }

  public void addKeyword4(String keyword) {
    myKeywords4.add(keyword);
    myKeywordParser = null;
  }

  public Set<String> getKeywords4() {
    return myKeywords4;
  }

  public String getLineComment() {
    return myLineComment;
  }

  public void setLineComment(String lineComment) {
    myLineComment = lineComment;
  }

  public String getStartComment() {
    return myStartComment;
  }

  public void setStartComment(String startComment) {
    myStartComment = startComment;
  }

  public String getEndComment() {
    return myEndComment;
  }

  public void setEndComment(String endComment) {
    myEndComment = endComment;
  }

  public String getHexPrefix() {
    return myHexPrefix;
  }

  public void setHexPrefix(String hexPrefix) {
    myHexPrefix = hexPrefix;
  }

  public String getNumPostfixChars() {
    return myNumPostfixChars;
  }

  public void setNumPostfixChars(String numPostfixChars) {
    myNumPostfixChars = numPostfixChars;
  }

  public boolean isIgnoreCase() {
    return myIgnoreCase;
  }

  public void setIgnoreCase(boolean ignoreCase) {
    // Case sensitivity affects keyword matching, so the cached parser is invalidated.
    myIgnoreCase = ignoreCase;
    myKeywordParser = null;
  }

  public boolean isHasBraces() {
    return myHasBraces;
  }

  public void setHasBraces(boolean hasBraces) {
    myHasBraces = hasBraces;
  }

  public boolean equals(Object o) {
    if (this == o) return true;
    if (!(o instanceof SyntaxTable)) return false;

    final SyntaxTable syntaxTable = (SyntaxTable)o;

    if (myIgnoreCase != syntaxTable.myIgnoreCase) return false;
    if (myEndComment != null ? !myEndComment.equals(syntaxTable.myEndComment) : syntaxTable.myEndComment != null) return false;
    if (myHexPrefix != null ? !myHexPrefix.equals(syntaxTable.myHexPrefix) : syntaxTable.myHexPrefix != null) return false;
    if (!myKeywords1.equals(syntaxTable.myKeywords1)) return false;
    if (!myKeywords2.equals(syntaxTable.myKeywords2)) return false;
    if (!myKeywords3.equals(syntaxTable.myKeywords3)) return false;
    if (!myKeywords4.equals(syntaxTable.myKeywords4)) return false;
    if (myLineComment != null ? !myLineComment.equals(syntaxTable.myLineComment) : syntaxTable.myLineComment != null) return false;
    if (myNumPostfixChars != null ? !myNumPostfixChars.equals(syntaxTable.myNumPostfixChars) : syntaxTable.myNumPostfixChars != null) return false;
    if (myStartComment != null ? !myStartComment.equals(syntaxTable.myStartComment) : syntaxTable.myStartComment != null) return false;
    if (myHasBraces != syntaxTable.myHasBraces) return false;
    if (myHasBrackets != syntaxTable.myHasBrackets) return false;
    if (myHasParens != syntaxTable.myHasParens) return false;
    if (myHasStringEscapes != syntaxTable.myHasStringEscapes) return false;
    if (lineCommentOnlyAtStart != syntaxTable.lineCommentOnlyAtStart) return false;

    return true;
  }

  public int hashCode() {
    // Based on keywords1 only; still consistent with equals, since equal tables
    // necessarily have equal keywords1 sets.
    return myKeywords1.hashCode();
  }

  public boolean isHasBrackets() {
    return myHasBrackets;
  }

  public boolean isHasParens() {
    return myHasParens;
  }

  public void setHasBrackets(boolean hasBrackets) {
    myHasBrackets = hasBrackets;
  }

  public void setHasParens(boolean hasParens) {
    myHasParens = hasParens;
  }

  public boolean isHasStringEscapes() {
    return myHasStringEscapes;
  }

  public void setHasStringEscapes(final boolean hasEscapes) {
    myHasStringEscapes = hasEscapes;
  }
}
/* * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ /* * Changes for SnappyData data platform. * * Portions Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. */ package io.snappydata.thrift.internal; import java.io.IOException; import java.io.InputStream; import java.io.Reader; import java.sql.SQLException; import com.pivotal.gemfirexd.internal.shared.common.reference.SQLState; import io.snappydata.thrift.common.ThriftExceptionUtil; import io.snappydata.thrift.snappydataConstants; /** * Base class for client BLOB and CLOB implementations. 
* * @author swale */ abstract class ClientLobBase { protected final ClientService service; protected final long lobId; protected ClientFinalizer finalizer; protected boolean streamedInput; protected int streamOffset; protected int length; protected ClientLobBase(ClientService service) { this.service = service; this.lobId = snappydataConstants.INVALID_ID; this.finalizer = null; this.streamedInput = true; this.length = -1; } protected ClientLobBase(ClientService service, long lobId, HostConnection source) { this.service = service; this.lobId = lobId; // invalid LOB ID means single lob chunk so ignore finalizer for that case if (lobId != snappydataConstants.INVALID_ID) { this.finalizer = new ClientFinalizer(this, service, snappydataConstants.BULK_CLOSE_LOB); this.finalizer.updateReferentData(lobId, source); } else { this.finalizer = null; } this.streamedInput = false; } protected final HostConnection getLobSource(boolean throwOnFailure, String op) throws SQLException { final ClientFinalizer finalizer = this.finalizer; final HostConnection source; if (finalizer != null && (source = finalizer.source) != null) { return source; } else if (throwOnFailure) { throw (SQLException)service.newExceptionForNodeFailure(null, op, service.isolationLevel, null, false); } else { return null; } } protected final int getLength(boolean forceMaterialize) throws SQLException { final int len = this.length; if (len >= 0) { return len; } else if (this.streamedInput) { return (this.length = streamLength(forceMaterialize)); } else { // LOB has been freed return 0; } } /** * @see java.sql.Blob#length() * @see java.sql.Clob#length() */ public final long length() throws SQLException { final int len = getLength(true); if (len >= 0) { return len; } else { throw ThriftExceptionUtil.newSQLException( SQLState.LOB_OBJECT_LENGTH_UNKNOWN_YET); } } static int readStream(final InputStream is, byte[] buf, int offset, int len) throws IOException { int readLen = 0; int readBytes = 0; while (len > 0 && 
(readBytes = is.read(buf, offset, len)) > 0) { readLen += readBytes; offset += readLen; len -= readLen; } return readLen > 0 ? readLen : (readBytes < 0 ? -1 : 0); } static int readStream(final Reader reader, char[] buf, int offset, int len) throws IOException { int readLen = 0; int readBytes = 0; while (len > 0 && (readBytes = reader.read(buf, offset, len)) > 0) { readLen += readBytes; offset += readLen; len -= readLen; } return readLen > 0 ? readLen : (readBytes < 0 ? -1 : 0); } protected void checkOffset(long offset) throws SQLException { if (offset < 0) { throw ThriftExceptionUtil.newSQLException(SQLState.BLOB_BAD_POSITION, null, offset + 1); } else if (offset >= Integer.MAX_VALUE) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_POSITION_TOO_LARGE, null, offset + 1); } } protected void checkLength(long length) throws SQLException { if (length < 0) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_NONPOSITIVE_LENGTH, null, length); } else if (length > Integer.MAX_VALUE) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_LENGTH_TOO_LONG, null, length); } } protected int checkOffset(long offset, long length, boolean trimIfRequired) throws SQLException { checkOffset(offset); checkLength(length); if (trimIfRequired && this.length >= 0) { long maxLen = this.length - offset; if (maxLen < 0) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_POSITION_TOO_LARGE, null, offset + 1); } // return trimmed length if blob was truncated length = Math.min(maxLen, length); offset = 0; } if ((offset + length) > Integer.MAX_VALUE) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_LENGTH_TOO_LONG, null, length); } return (int)length; } /** * @see java.sql.Blob#truncate(long) * @see java.sql.Clob#truncate(long) */ public void truncate(long len) throws SQLException { checkLength(len); final int length = getLength(false); if (length >= 0 && length < len) { throw ThriftExceptionUtil.newSQLException( SQLState.BLOB_LENGTH_TOO_LONG, null, len); } 
this.length = (int)len; } public final void free() { final ClientFinalizer finalizer = this.finalizer; if (finalizer != null) { finalizer.clear(); finalizer.getHolder().addToPendingQueue(finalizer); this.finalizer = null; } this.streamedInput = false; this.streamOffset = 0; this.length = -1; clear(); } protected abstract int streamLength(boolean forceMaterialize) throws SQLException; protected abstract void clear(); }
/* * The MIT License * * Copyright (c) 2016, CloudBees, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.jenkinsci.remoting.engine; import edu.umd.cs.findbugs.annotations.CheckForNull; import edu.umd.cs.findbugs.annotations.NonNull; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.jenkinsci.remoting.util.KeyUtils; import org.jenkinsci.remoting.util.ThrowableUtils; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Socket; import java.net.URL; import java.nio.channels.SocketChannel; import java.nio.charset.StandardCharsets; import java.security.interfaces.RSAPublicKey; import java.util.Collections; import java.util.LinkedHashSet; import java.util.Objects; import java.util.Set; /** * Represents a {@code TcpSlaveAgentListener} endpoint details. 
* * @since 3.0 */ public class JnlpAgentEndpoint { /** * The hostname to connect to. */ @NonNull private final String host; private final int port; /** * The {@code InstanceIdentity.getPublic()} of the instance or {@code null} if the instance identity was not * published. */ @CheckForNull private final RSAPublicKey publicKey; /** * The {@code AgentProtocol.getName()}s supported by the instance or {@code null} if the supported protocols are * not published. */ @CheckForNull private final Set<String> protocols; /** * Jenkins URL for the discovered endpoint. */ @CheckForNull private final URL serviceUrl; /** * @deprecated Use {@link #JnlpAgentEndpoint(java.lang.String, int, java.security.interfaces.RSAPublicKey, java.util.Set, java.net.URL)} */ @Deprecated public JnlpAgentEndpoint(@NonNull String host, int port, @CheckForNull RSAPublicKey publicKey, @CheckForNull Set<String> protocols) { this(host, port, publicKey, protocols, null); } /** * Constructor for a remote {@code Jenkins} instance. * * @param host the hostname. * @param port the port. * @param publicKey the {@code InstanceIdentity.getPublic()} of the remote instance (if known). * @param protocols The supported protocols. * @param serviceURL URL of the service hosting the remoting endpoint. * Use {@code null} if it is not a web service or if the URL cannot be determined * @since 3.0 */ public JnlpAgentEndpoint(@NonNull String host, int port, @CheckForNull RSAPublicKey publicKey, @CheckForNull Set<String> protocols, @CheckForNull URL serviceURL) { if (port <= 0 || 65536 <= port) { throw new IllegalArgumentException("Port " + port + " is not in the range 1-65535"); } this.host = host; this.port = port; this.publicKey = publicKey; this.protocols = protocols == null || protocols.isEmpty() ? null : Collections.unmodifiableSet(new LinkedHashSet<>(protocols)); this.serviceUrl = serviceURL; } /** * Gets the socket address. 
* * @return the socket address */ @NonNull public InetSocketAddress getAddress() { return new InetSocketAddress(host, port); } /** * Retrieves URL of the web service providing the remoting endpoint. * @return Service URL if available. {@code null} otherwise. */ @CheckForNull public URL getServiceUrl() { return serviceUrl; } /** * Gets the hostname. * * @return the hostname. */ @NonNull public String getHost() { return host; } /** * Gets the port. * * @return the port. */ public int getPort() { return port; } /** * Gets the {@code InstanceIdentity.getPublic()} if available. * * @return the {@code InstanceIdentity.getPublic()} or {@code null}. */ @CheckForNull public RSAPublicKey getPublicKey() { return publicKey; } /** * Gets the supported protocols if available. * * @return the supported protocols or {@code null}. */ @CheckForNull public Set<String> getProtocols() { return protocols; } /** * Checks if the named protocol is supported. * * @param name the name of the protocol to check. * @return {@code false} if and only if the endpoint reports supported protocols and the named protocol is not in * the list of supported protocols. */ public boolean isProtocolSupported(@NonNull String name) { return protocols == null || protocols.contains(name); } /** * Opens a socket connection to the remote endpoint. * * @param socketTimeout the {@link Socket#setSoTimeout(int)} to apply to the socket. * @return the socket. * @throws IOException if things go wrong. */ @SuppressFBWarnings(value = "VA_FORMAT_STRING_USES_NEWLINE", justification = "Unsafe endline symbol is a pert of the protocol. Unsafe to fix it. 
See TODO " + "below") public Socket open(int socketTimeout) throws IOException { boolean isHttpProxy = false; InetSocketAddress targetAddress = null; SocketChannel channel = null; try { targetAddress = JnlpAgentEndpointResolver.getResolvedHttpProxyAddress(host, port); if (targetAddress == null) { targetAddress = new InetSocketAddress(host, port); } else { isHttpProxy = true; } // We open the socket using SocketChannel so that we are assured that the socket will always have // a socket channel. Sockets opened via Socket.open will typically not have a SocketChannel // and thus we will not have the ability to use NIO if we want to. channel = SocketChannel.open(targetAddress); Socket socket = channel.socket(); socket.setTcpNoDelay(true); // we'll do buffering by ourselves // set read time out to avoid infinite hang. the time out should be long enough so as not // to interfere with normal operation. the main purpose of this is that when the other peer dies // abruptly, we shouldn't hang forever, and at some point we should notice that the connection // is gone. 
socket.setSoTimeout(socketTimeout); if (isHttpProxy) { String connectCommand = String.format("CONNECT %s:%s HTTP/1.1\r\nHost: %s\r\n\r\n", host, port, host); socket.getOutputStream() .write(connectCommand.getBytes(StandardCharsets.UTF_8)); // TODO: internationalized domain names BufferedInputStream is = new BufferedInputStream(socket.getInputStream()); BufferedReader reader = new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8)); String line = reader.readLine(); if (line == null) throw new IOException("Proxy socket closed"); String[] responseLineParts = line.trim().split(" "); if (responseLineParts.length < 2 || !responseLineParts[1].equals("200")) { throw new IOException("Got a bad response from proxy: " + line); } while ((line = reader.readLine()) != null && !line.trim().isEmpty()) { // Do nothing, scrolling through headers returned from proxy } } return socket; } catch (IOException e) { if (channel != null) { try { channel.close(); } catch (IOException suppressed) { e = ThrowableUtils.chain(e, suppressed); } } String suffix = ""; if (isHttpProxy) { suffix = " through proxy " + targetAddress.toString(); } throw new IOException("Failed to connect to " + host + ':' + port + suffix, e); } } /** * {@inheritDoc} */ @Override public int hashCode() { return host.hashCode(); } /** * {@inheritDoc} */ @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } JnlpAgentEndpoint that = (JnlpAgentEndpoint) o; if (port != that.port) { return false; } if (!KeyUtils.equals(publicKey, that.publicKey)) { return false; } if (!Objects.equals(protocols, that.protocols)) { return false; } if (host.equals(that.host)) { return true; } // now need to ensure that we do special handling for local addresses InetAddress thisAddr = this.getAddress().getAddress(); InetAddress thatAddr = that.getAddress().getAddress(); if (!thisAddr.getClass().equals(thatAddr.getClass())) { // differentiate 
Inet4Address from Inet6Address return false; } if (thisAddr.isAnyLocalAddress()) { return (thatAddr.isLinkLocalAddress() || thatAddr.isLoopbackAddress() || thatAddr.isAnyLocalAddress()); } if (thatAddr.isAnyLocalAddress()) { return (thisAddr.isLinkLocalAddress() || thisAddr.isLoopbackAddress()); } return false; } /** * {@inheritDoc} */ @Override public String toString() { return "JnlpAgentEndpoint{" + "host=" + host + ", port=" + port + ", publicKey=" + KeyUtils.fingerprint(publicKey) + ", protocols=" + protocols + '}'; } }
/*
 * Copyright (C) 2015 Haruki Hasegawa
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.h6ah4i.android.example.advrecyclerview.demo_ds;

import android.support.v4.view.ViewCompat;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import com.h6ah4i.android.example.advrecyclerview.R;
import com.h6ah4i.android.example.advrecyclerview.common.data.AbstractDataProvider;
import com.h6ah4i.android.example.advrecyclerview.common.utils.ViewUtils;
import com.h6ah4i.android.widget.advrecyclerview.draggable.DraggableItemAdapter;
import com.h6ah4i.android.widget.advrecyclerview.draggable.ItemDraggableRange;
import com.h6ah4i.android.widget.advrecyclerview.draggable.RecyclerViewDragDropManager;
import com.h6ah4i.android.widget.advrecyclerview.swipeable.RecyclerViewSwipeManager;
import com.h6ah4i.android.widget.advrecyclerview.swipeable.SwipeableItemAdapter;
import com.h6ah4i.android.widget.advrecyclerview.utils.AbstractDraggableSwipeableItemViewHolder;
import com.h6ah4i.android.widget.advrecyclerview.utils.RecyclerViewAdapterUtils;

/**
 * Demo RecyclerView adapter that supports both drag-and-drop reordering and
 * swipe-to-remove/swipe-to-pin, backed by an {@link AbstractDataProvider}.
 * Drag is initiated from a dedicated handle view; swiping left pins the item,
 * swiping right removes it (unless already pinned).
 */
public class MyDraggableSwipeableItemAdapter
        extends RecyclerView.Adapter<MyDraggableSwipeableItemAdapter.MyViewHolder>
        implements DraggableItemAdapter<MyDraggableSwipeableItemAdapter.MyViewHolder>,
        SwipeableItemAdapter<MyDraggableSwipeableItemAdapter.MyViewHolder> {
    private static final String TAG = "MyDSItemAdapter";

    // data source for the list items
    private AbstractDataProvider mProvider;
    // optional observer notified of removals, pins and clicks
    private EventListener mEventListener;
    // click listener attached to the whole item row (receives clicks when not pinned)
    private View.OnClickListener mItemViewOnClickListener;
    // click listener attached to the swipeable container (receives clicks when pinned)
    private View.OnClickListener mSwipeableViewContainerOnClickListener;

    /** Callbacks for item-level events raised by this adapter. */
    public interface EventListener {
        // called after an item has been swiped away and removed from the provider
        void onItemRemoved(int position);

        // called after an item has been pinned by a left swipe
        void onItemPinned(int position);

        // called when an item view is clicked; pinned indicates which view received the click
        void onItemViewClicked(View v, boolean pinned);
    }

    /** View holder exposing the swipeable container, drag handle and text views. */
    public static class MyViewHolder extends AbstractDraggableSwipeableItemViewHolder {
        public ViewGroup mContainer;
        public View mDragHandle;
        public TextView mTextView;

        public MyViewHolder(View v) {
            super(v);
            mContainer = (ViewGroup) v.findViewById(R.id.container);
            mDragHandle = v.findViewById(R.id.drag_handle);
            mTextView = (TextView) v.findViewById(android.R.id.text1);
        }

        @Override
        public View getSwipeableContainerView() {
            // the container is the part that slides during a swipe
            return mContainer;
        }
    }

    /**
     * Creates the adapter over the given data provider and enables stable IDs
     * (required by the drag-and-drop / swipe managers).
     */
    public MyDraggableSwipeableItemAdapter(AbstractDataProvider dataProvider) {
        mProvider = dataProvider;
        mItemViewOnClickListener = new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onItemViewClick(v);
            }
        };
        mSwipeableViewContainerOnClickListener = new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onSwipeableViewContainerClick(v);
            }
        };

        // DraggableItemAdapter and SwipeableItemAdapter require stable ID, and also
        // have to implement the getItemId() method appropriately.
        setHasStableIds(true);
    }

    // item row was clicked; only happens when the item is pinned (container slid away)
    private void onItemViewClick(View v) {
        if (mEventListener != null) {
            mEventListener.onItemViewClicked(v, true); // true --- pinned
        }
    }

    // swipeable container was clicked; resolve the owning item view before forwarding
    private void onSwipeableViewContainerClick(View v) {
        if (mEventListener != null) {
            mEventListener.onItemViewClicked(
                    RecyclerViewAdapterUtils.getParentViewHolderItemView(v), false); // false --- not pinned
        }
    }

    @Override
    public long getItemId(int position) {
        // stable ID sourced from the data item (see setHasStableIds in the constructor)
        return mProvider.getItem(position).getId();
    }

    @Override
    public int getItemViewType(int position) {
        return mProvider.getItem(position).getViewType();
    }

    @Override
    public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        final LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        // two layouts, selected by view type 0 vs non-0
        final View v = inflater.inflate(
                (viewType == 0) ? R.layout.list_item : R.layout.list_item2, parent, false);
        return new MyViewHolder(v);
    }

    @Override
    public void onBindViewHolder(MyViewHolder holder, int position) {
        final AbstractDataProvider.Data item = mProvider.getItem(position);

        // set listeners
        // (if the item is *not pinned*, click event comes to the itemView)
        holder.itemView.setOnClickListener(mItemViewOnClickListener);
        // (if the item is *pinned*, click event comes to the mContainer)
        holder.mContainer.setOnClickListener(mSwipeableViewContainerOnClickListener);

        // set text
        holder.mTextView.setText(item.getText());

        // set background resource (target view ID: container)
        final int dragState = holder.getDragStateFlags();
        final int swipeState = holder.getSwipeStateFlags();

        // only touch the background when either manager flagged a state change
        if (((dragState & RecyclerViewDragDropManager.STATE_FLAG_IS_UPDATED) != 0) ||
                ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_IS_UPDATED) != 0)) {
            int bgResId;

            // drag states take precedence over swipe states
            if ((dragState & RecyclerViewDragDropManager.STATE_FLAG_IS_ACTIVE) != 0) {
                bgResId = R.drawable.bg_item_dragging_active_state;
            } else if ((dragState & RecyclerViewDragDropManager.STATE_FLAG_DRAGGING) != 0) {
                bgResId = R.drawable.bg_item_dragging_state;
            } else if ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_IS_ACTIVE) != 0) {
                bgResId = R.drawable.bg_item_swiping_active_state;
            } else if ((swipeState & RecyclerViewSwipeManager.STATE_FLAG_SWIPING) != 0) {
                bgResId = R.drawable.bg_item_swiping_state;
            } else {
                bgResId = R.drawable.bg_item_normal_state;
            }

            holder.mContainer.setBackgroundResource(bgResId);
        }

        // set swiping properties: a pinned item stays slid fully off to the left
        holder.setSwipeItemSlideAmount(
                item.isPinnedToSwipeLeft() ? RecyclerViewSwipeManager.OUTSIDE_OF_THE_WINDOW_LEFT : 0);
    }

    @Override
    public int getItemCount() {
        return mProvider.getCount();
    }

    @Override
    public void onMoveItem(int fromPosition, int toPosition) {
        Log.d(TAG, "onMoveItem(fromPosition = " + fromPosition + ", toPosition = " + toPosition + ")");

        if (fromPosition == toPosition) {
            return;
        }

        // move in the data source first, then notify RecyclerView
        mProvider.moveItem(fromPosition, toPosition);

        notifyItemMoved(fromPosition, toPosition);
    }

    @Override
    public boolean onCheckCanStartDrag(MyViewHolder holder, int position, int x, int y) {
        // x, y --- relative from the itemView's top-left
        final View containerView = holder.mContainer;
        final View dragHandleView = holder.mDragHandle;

        // translate the touch point into the container's coordinate space
        // (+0.5f rounds the float translation to the nearest int)
        final int offsetX = containerView.getLeft() + (int) (ViewCompat.getTranslationX(containerView) + 0.5f);
        final int offsetY = containerView.getTop() + (int) (ViewCompat.getTranslationY(containerView) + 0.5f);

        // drag starts only when the touch lands on the drag handle
        return ViewUtils.hitTest(dragHandleView, x - offsetX, y - offsetY);
    }

    @Override
    public ItemDraggableRange onGetItemDraggableRange(MyViewHolder holder, int position) {
        // no drag-sortable range specified
        return null;
    }

    @Override
    public int onGetSwipeReactionType(MyViewHolder holder, int position, int x, int y) {
        // touches on the drag handle must not trigger swiping
        if (onCheckCanStartDrag(holder, position, x, y)) {
            return RecyclerViewSwipeManager.REACTION_CAN_NOT_SWIPE_BOTH;
        } else {
            return mProvider.getItem(position).getSwipeReactionType();
        }
    }

    @Override
    public void onSetSwipeBackground(MyViewHolder holder, int position, int type) {
        // pick the background revealed behind the sliding container; 0 clears it
        int bgRes = 0;
        switch (type) {
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_NEUTRAL_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_neutral;
                break;
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_LEFT_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_left;
                break;
            case RecyclerViewSwipeManager.DRAWABLE_SWIPE_RIGHT_BACKGROUND:
                bgRes = R.drawable.bg_swipe_item_right;
                break;
        }

        holder.itemView.setBackgroundResource(bgRes);
    }

    @Override
    public int onSwipeItem(MyViewHolder holder, int position, int result) {
        Log.d(TAG, "onSwipeItem(result = " + result + ")");

        switch (result) {
            // swipe right
            case RecyclerViewSwipeManager.RESULT_SWIPED_RIGHT:
                if (mProvider.getItem(position).isPinnedToSwipeLeft()) {
                    // pinned --- back to default position
                    return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_DEFAULT;
                } else {
                    // not pinned --- remove
                    return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_REMOVE_ITEM;
                }
            // swipe left -- pin
            case RecyclerViewSwipeManager.RESULT_SWIPED_LEFT:
                return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_MOVE_TO_SWIPED_DIRECTION;
            // other --- do nothing
            case RecyclerViewSwipeManager.RESULT_CANCELED:
            default:
                return RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_DEFAULT;
        }
    }

    @Override
    public void onPerformAfterSwipeReaction(MyViewHolder holder, int position, int result, int reaction) {
        Log.d(TAG, "onPerformAfterSwipeReaction(result = " + result + ", reaction = " + reaction + ")");

        final AbstractDataProvider.Data item = mProvider.getItem(position);

        if (reaction == RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_REMOVE_ITEM) {
            // remove from data source, notify, then inform the listener
            mProvider.removeItem(position);
            notifyItemRemoved(position);

            if (mEventListener != null) {
                mEventListener.onItemRemoved(position);
            }
        } else if (reaction == RecyclerViewSwipeManager.AFTER_SWIPE_REACTION_MOVE_TO_SWIPED_DIRECTION) {
            // pin the item in the swiped-left position
            item.setPinnedToSwipeLeft(true);

            notifyItemChanged(position);

            if (mEventListener != null) {
                mEventListener.onItemPinned(position);
            }
        } else {
            // swipe ended without a reaction; ensure the item is unpinned
            item.setPinnedToSwipeLeft(false);
        }
    }

    public EventListener getEventListener() {
        return mEventListener;
    }

    public void setEventListener(EventListener eventListener) {
        mEventListener = eventListener;
    }
}
/*- * See the file LICENSE for redistribution information. * * Copyright (c) 2002, 2010 Oracle and/or its affiliates. All rights reserved. * * $Id$ */ package com.sleepycat.db; import com.sleepycat.db.internal.DbConstants; import com.sleepycat.db.internal.Dbc; /** A database cursor for a secondary database. Cursors are not thread safe and the application is responsible for coordinating any multithreaded access to a single cursor object. <p> Secondary cursors are returned by {@link SecondaryDatabase#openCursor SecondaryDatabase.openCursor} and {@link SecondaryDatabase#openSecondaryCursor SecondaryDatabase.openSecondaryCursor}. The distinguishing characteristics of a secondary cursor are: <ul> <li>Direct calls to <code>put()</code> methods on a secondary cursor are prohibited. <li>The {@link #delete} method of a secondary cursor will delete the primary record and as well as all its associated secondary records. <li>Calls to all get methods will return the data from the associated primary database. <li>Additional get method signatures are provided to return the primary key in an additional pKey parameter. <li>Calls to {@link #dup} will return a {@link SecondaryCursor}. <li>The {@link #dupSecondary} method is provided to return a {@link SecondaryCursor} that doesn't require casting. </ul> <p> To obtain a secondary cursor with default attributes: <blockquote><pre> SecondaryCursor cursor = myDb.openSecondaryCursor(txn, null); </pre></blockquote> To customize the attributes of a cursor, use a CursorConfig object. <blockquote><pre> CursorConfig config = new CursorConfig(); config.setDirtyRead(true); SecondaryCursor cursor = myDb.openSecondaryCursor(txn, config); </pre></blockquote> */ public class SecondaryCursor extends Cursor { /* package */ SecondaryCursor(final SecondaryDatabase database, final Dbc dbc, final CursorConfig config) throws DatabaseException { super(database, dbc, config); } /** Return the SecondaryDatabase handle associated with this Cursor. 
<p> @return The SecondaryDatabase handle associated with this Cursor. <p> */ public SecondaryDatabase getSecondaryDatabase() { return (SecondaryDatabase)super.getDatabase(); } /** Returns a new <code>SecondaryCursor</code> for the same transaction as the original cursor. */ public Cursor dup(final boolean samePosition) throws DatabaseException { return dupSecondary(samePosition); } /** Returns a new copy of the cursor as a <code>SecondaryCursor</code>. <p> Calling this method is the equivalent of calling {@link #dup} and casting the result to {@link SecondaryCursor}. <p> @see #dup */ public SecondaryCursor dupSecondary(final boolean samePosition) throws DatabaseException { return new SecondaryCursor(getSecondaryDatabase(), dbc.dup(samePosition ? DbConstants.DB_POSITION : 0), config); } /** Returns the key/data pair to which the cursor refers. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. 
@return {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the key/pair at the cursor position has been deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getCurrent(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_CURRENT | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the first key/data pair of the database, and return that pair. If the first key has duplicate values, the first data item in the set of duplicates is returned. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. 
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getFirst(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_FIRST | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the last key/data pair of the database, and return that pair. If the last key has duplicate values, the last data item in the set of duplicates is returned. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. 
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getLast(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_LAST | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the next key/data pair and return that pair. If the matching key has duplicate values, the first data item in the set of duplicates is returned. <p> If the cursor is not yet initialized, move the cursor to the first key/data pair of the database, and return that pair. Otherwise, the cursor is moved to the next key/data pair of the database, and that pair is returned. In the presence of duplicate key values, the value of the key may not change. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. 
Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getNext(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_NEXT | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** If the next key/data pair of the database is a duplicate data record for the current key/data pair, move the cursor to the next key/data pair of the database and return that pair. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. 
Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getNextDup(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_NEXT_DUP | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the next non-duplicate key/data pair and return that pair. If the matching key has duplicate values, the first data item in the set of duplicates is returned. <p> If the cursor is not yet initialized, move the cursor to the first key/data pair of the database, and return that pair. Otherwise, the cursor is moved to the next non-duplicate key of the database, and that key/data pair is returned. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. 
@param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getNextNoDup(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_NEXT_NODUP | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the previous key/data pair and return that pair. If the matching key has duplicate values, the last data item in the set of duplicates is returned. <p> If the cursor is not yet initialized, move the cursor to the last key/data pair of the database, and return that pair. Otherwise, the cursor is moved to the previous key/data pair of the database, and that pair is returned. In the presence of duplicate key values, the value of the key may not change. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. 
<p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. Its byte array does not need to be initialized by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getPrev(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_PREV | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** If the previous key/data pair of the database is a duplicate data record for the current key/data pair, move the cursor to the previous key/data pair of the database and return that pair. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key returned as output. 
Its byte array does not need to be initialized by the caller.
@param pKey the primary key returned as output.  Its byte array does not
need to be initialized by the caller.
@param data the primary data returned as output.  Its byte array does not
need to be initialized by the caller.
@param lockMode the locking attributes; if null, default attributes are
used.
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND
OperationStatus.NOTFOUND} if no matching key/data pair is found;
{@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY}
if the database is a Queue or Recno database and the specified key exists,
but was never explicitly created by the application or was later deleted;
otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS
OperationStatus.SUCCESS}.
*/
public OperationStatus getPrevDup(final DatabaseEntry key,
                                  final DatabaseEntry pKey,
                                  final DatabaseEntry data,
                                  LockMode lockMode)
    throws DatabaseException {
    /*
     * "Get the previous duplicate" isn't directly supported by the C API,
     * so here's how to get it: dup the cursor and call getPrev, then dup
     * the result and call getNextDup. If both succeed then there was a
     * previous duplicate and the first dup is sitting on it. Keep that,
     * and call getCurrent to fill in the user's buffers.
     */
    Dbc dup1 = dbc.dup(DbConstants.DB_POSITION);
    try {
        // Step back one record from the current position on the copy.
        int errCode = dup1.get(DatabaseEntry.IGNORE, DatabaseEntry.IGNORE,
            DbConstants.DB_PREV | LockMode.getFlag(lockMode));
        if (errCode == 0) {
            // Probe forward with DB_NEXT_DUP on a second copy: success
            // means the record behind us shares the current key, i.e. it
            // really is a previous duplicate.
            Dbc dup2 = dup1.dup(DbConstants.DB_POSITION);
            try {
                errCode = dup2.get(DatabaseEntry.IGNORE, DatabaseEntry.IGNORE,
                    DbConstants.DB_NEXT_DUP | LockMode.getFlag(lockMode));
            } finally {
                dup2.close();
            }
        }
        if (errCode == 0)
            // Fill the caller's buffers from dup1's (confirmed) position.
            errCode = dup1.pget(key, pKey, data,
                DbConstants.DB_CURRENT | LockMode.getFlag(lockMode) |
                ((data == null) ? 0 : data.getMultiFlag()));
        if (errCode == 0) {
            // Success: adopt dup1 as this cursor's handle and let the
            // finally block close the old one (now referenced by dup1).
            Dbc tdbc = dbc;
            dbc = dup1;
            dup1 = tdbc;
        }
        return OperationStatus.fromInt(errCode);
    } finally {
        dup1.close();
    }
}

/**
Move the cursor to the previous non-duplicate key/data pair and return
that pair.  If the matching key has duplicate values, the last data item
in the set of duplicates is returned.
<p>
If the cursor is not yet initialized, move the cursor to the last
key/data pair of the database, and return that pair.  Otherwise, the
cursor is moved to the previous non-duplicate key of the database, and
that key/data pair is returned.
<p>
If this method fails for any reason, the position of the cursor will be
unchanged.
@throws NullPointerException if a DatabaseEntry parameter is null or does
not contain a required non-null byte array.
<p>
@throws DeadlockException if the operation was selected to resolve a
deadlock.
<p>
@throws IllegalArgumentException if an invalid parameter was specified.
<p>
@throws DatabaseException if a failure occurs.
<p>
@param key the secondary key returned as output.  Its byte array does not
need to be initialized by the caller.
@param pKey the primary key returned as output.  Its byte array does not
need to be initialized by the caller.
@param data the primary data returned as output.  Its byte array does not
need to be initialized by the caller.
@param lockMode the locking attributes; if null, default attributes are
used.
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND
OperationStatus.NOTFOUND} if no matching key/data pair is found;
{@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY}
if the database is a Queue or Recno database and the specified key exists,
but was never explicitly created by the application or was later deleted;
otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS
OperationStatus.SUCCESS}.
*/
public OperationStatus getPrevNoDup(final DatabaseEntry key,
                                    final DatabaseEntry pKey,
                                    final DatabaseEntry data,
                                    LockMode lockMode)
    throws DatabaseException {

    // Combine the positioning operation with the caller's locking
    // attributes and any bulk-retrieval flag carried by the data entry.
    int flags = DbConstants.DB_PREV_NODUP | LockMode.getFlag(lockMode);
    if (data != null)
        flags |= data.getMultiFlag();
    return OperationStatus.fromInt(dbc.pget(key, pKey, data, flags));
}

/**
Return the record number associated with the cursor.  The record number
will be returned in the data parameter.
<p>
For this method to be called, the underlying database must be of type
Btree, and it must have been configured to support record numbers.
<p>
When called on a cursor opened on a database that has been made into a
secondary index, the method returns the record numbers of both the
secondary and primary databases.  If either underlying database is not of
type Btree or is not configured with record numbers, the out-of-band
record number of 0 is returned.
<p>
If this method fails for any reason, the position of the cursor will be
unchanged.
@throws NullPointerException if a DatabaseEntry parameter is null or does
not contain a required non-null byte array.
<p>
@throws DeadlockException if the operation was selected to resolve a
deadlock.
<p>
@throws IllegalArgumentException if an invalid parameter was specified.
<p>
@throws DatabaseException if a failure occurs.
<p>
@param secondaryRecno the secondary record number returned as output.
Its byte array does not need to be initialized by the caller.
@param primaryRecno the primary record number returned as output.  Its
byte array does not need to be initialized by the caller.
@param lockMode the locking attributes; if null, default attributes are
used.
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getRecordNumber(final DatabaseEntry secondaryRecno, final DatabaseEntry primaryRecno, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(DatabaseEntry.IGNORE, secondaryRecno, primaryRecno, DbConstants.DB_GET_RECNO | LockMode.getFlag(lockMode))); } /** Move the cursor to the given key of the database, and return the datum associated with the given key. If the matching key has duplicate values, the first data item in the set of duplicates is returned. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key used as input. It must be initialized with a non-null byte array by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. 
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getSearchKey(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_SET | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the closest matching key of the database, and return the data item associated with the matching key. If the matching key has duplicate values, the first data item in the set of duplicates is returned. <p> The returned key/data pair is for the smallest key greater than or equal to the specified key (as determined by the key comparison function), permitting partial key matches and range searches. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key used as input and returned as output. It must be initialized with a non-null byte array by the caller. @param pKey the primary key returned as output. Its byte array does not need to be initialized by the caller. @param data the primary data returned as output. 
Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getSearchKeyRange(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_SET_RANGE | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the specified secondary and primary key, where both the primary and secondary key items must match. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key used as input. It must be initialized with a non-null byte array by the caller. @param pKey the primary key used as input. It must be initialized with a non-null byte array by the caller. @param data the primary data returned as output. Its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. 
@return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getSearchBoth(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_GET_BOTH | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the specified secondary key and closest matching primary key of the database. <p> In the case of any database supporting sorted duplicate sets, the returned key/data pair is for the smallest primary key greater than or equal to the specified primary key (as determined by the key comparison function), permitting partial matches and range searches in duplicate data sets. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param key the secondary key used as input and returned as output. It must be initialized with a non-null byte array by the caller. @param pKey the primary key used as input and returned as output. It must be initialized with a non-null byte array by the caller. @param data the primary data returned as output. Its byte array does not need to be initialized by the caller. 
@param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getSearchBothRange(final DatabaseEntry key, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(key, pKey, data, DbConstants.DB_GET_BOTH_RANGE | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } /** Move the cursor to the specific numbered record of the database, and return the associated key/data pair. <p> The data field of the specified key must be a byte array containing a record number, as described in {@link com.sleepycat.db.DatabaseEntry DatabaseEntry}. This determines the record to be retrieved. <p> For this method to be called, the underlying database must be of type Btree, and it must have been configured to support record numbers. <p> If this method fails for any reason, the position of the cursor will be unchanged. @throws NullPointerException if a DatabaseEntry parameter is null or does not contain a required non-null byte array. <p> @throws DeadlockException if the operation was selected to resolve a deadlock. <p> @throws IllegalArgumentException if an invalid parameter was specified. <p> @throws DatabaseException if a failure occurs. <p> @param secondaryRecno the secondary record number used as input. It must be initialized with a non-null byte array by the caller. @param data the primary data returned as output. 
Multiple results can be retrieved by passing an object that is a subclass of {@link com.sleepycat.db.MultipleEntry MultipleEntry}, otherwise its byte array does not need to be initialized by the caller. @param lockMode the locking attributes; if null, default attributes are used. @return {@link com.sleepycat.db.OperationStatus#NOTFOUND OperationStatus.NOTFOUND} if no matching key/data pair is found; {@link com.sleepycat.db.OperationStatus#KEYEMPTY OperationStatus.KEYEMPTY} if the database is a Queue or Recno database and the specified key exists, but was never explicitly created by the application or was later deleted; otherwise, {@link com.sleepycat.db.OperationStatus#SUCCESS OperationStatus.SUCCESS}. */ public OperationStatus getSearchRecordNumber( final DatabaseEntry secondaryRecno, final DatabaseEntry pKey, final DatabaseEntry data, LockMode lockMode) throws DatabaseException { return OperationStatus.fromInt( dbc.pget(secondaryRecno, pKey, data, DbConstants.DB_SET_RECNO | LockMode.getFlag(lockMode) | ((data == null) ? 0 : data.getMultiFlag()))); } }
/*
 * Copyright (c) 2003, PostgreSQL Global Development Group
 * See the LICENSE file in the project root for more information.
 */
// Copyright (c) 2004, Open Cloud Limited.

package org.postgresql.sspi;

import org.postgresql.core.PGStream;
import org.postgresql.util.HostSpec;
import org.postgresql.util.PSQLException;
import org.postgresql.util.PSQLState;

import com.sun.jna.LastErrorException;
import com.sun.jna.Platform;
import com.sun.jna.platform.win32.Sspi;
import com.sun.jna.platform.win32.Sspi.SecBufferDesc;
import com.sun.jna.platform.win32.Win32Exception;
import waffle.windows.auth.IWindowsCredentialsHandle;
import waffle.windows.auth.impl.WindowsCredentialsHandleImpl;
import waffle.windows.auth.impl.WindowsSecurityContextImpl;

import java.io.IOException;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * <p>Use Waffle-JNI to support SSPI authentication when PgJDBC is running on a Windows client and
 * talking to a Windows server.</p>
 *
 * <p>SSPI is not supported on a non-Windows client.</p>
 *
 * @author craig
 */
public class SSPIClient implements ISSPIClient {

  /** SPN service class used when the caller does not supply one. */
  public static final String SSPI_DEFAULT_SPN_SERVICE_CLASS = "POSTGRES";

  private static final Logger LOGGER = Logger.getLogger(SSPIClient.class.getName());

  private final PGStream pgStream;
  private final String spnServiceClass;
  private final boolean enableNegotiate;

  // Native SSPI state; allocated in startSSPI() and released by dispose().
  private IWindowsCredentialsHandle clientCredentials;
  private WindowsSecurityContextImpl sspiContext;
  private String targetName;

  /**
   * <p>Instantiate an SSPIClient for authentication of a connection.</p>
   *
   * <p>SSPIClient is not re-usable across connections.</p>
   *
   * <p>It is safe to instantiate SSPIClient even if Waffle and JNA are missing or on non-Windows
   * platforms, however you may not call any methods other than isSSPISupported().</p>
   *
   * @param pgStream PostgreSQL connection stream
   * @param spnServiceClass SSPI SPN service class, defaults to POSTGRES if null
   * @param enableNegotiate enable negotiate
   */
  public SSPIClient(PGStream pgStream, String spnServiceClass, boolean enableNegotiate) {
    this.pgStream = pgStream;
    // Substitute the default SPN service class when none was supplied.
    this.spnServiceClass =
        (spnServiceClass == null || spnServiceClass.isEmpty())
            ? SSPI_DEFAULT_SPN_SERVICE_CLASS
            : spnServiceClass;
    // When false, Kerberos is forced and SSPI negotiation (SPNEGO) is disabled.
    this.enableNegotiate = enableNegotiate;
  }

  /**
   * Test whether we can attempt SSPI authentication. If false, do not attempt to call any other
   * SSPIClient methods.
   *
   * @return true if it's safe to attempt SSPI authentication
   */
  @Override
  public boolean isSSPISupported() {
    try {
      // SSPI is Windows-only.  Platform.isWindows() also proves JNA is on the
      // classpath: if JNA is absent this throws NoClassDefFoundError.
      if (!Platform.isWindows()) {
        LOGGER.log(Level.FINE, "SSPI not supported: non-Windows host");
        return false;
      }
      // Waffle itself must also be on the CLASSPATH.
      Class.forName("waffle.windows.auth.impl.WindowsSecurityContextImpl");
      return true;
    } catch (NoClassDefFoundError | ClassNotFoundException e) {
      LOGGER.log(Level.WARNING, "SSPI unavailable (no Waffle/JNA libraries?)", e);
      return false;
    }
  }

  /**
   * Build the Service Principal Name for the server we are connecting to.
   *
   * @return the SPN string
   * @throws PSQLException if the SPN cannot be determined
   */
  private String makeSPN() throws PSQLException {
    final HostSpec hostSpec = pgStream.getHostSpec();
    try {
      /* The GSSAPI implementation does not use the port in the service name,
         so force the port number to 0.  Fixes issue 1482. */
      return NTDSAPIWrapper.instance.DsMakeSpn(
          spnServiceClass, hostSpec.getHost(), null, (short) 0, null);
    } catch (LastErrorException e) {
      throw new PSQLException("SSPI setup failed to determine SPN",
          PSQLState.CONNECTION_UNABLE_TO_CONNECT, e);
    }
  }

  /**
   * Respond to an authentication request from the back-end for SSPI authentication (AUTH_REQ_SSPI).
   *
   * @throws SQLException on SSPI authentication handshake failure
   * @throws IOException on network I/O issues
   */
  @Override
  public void startSSPI() throws SQLException, IOException {
    // SPNEGO ("negotiate") is the usual choice; plain Kerberos is used when
    // the client asked for GSSAPI without explicitly enabling negotiation.
    final String securityPackage = enableNegotiate ? "negotiate" : "kerberos";

    LOGGER.log(Level.FINEST, "Beginning SSPI/Kerberos negotiation with SSPI package: {0}",
        securityPackage);

    try {
      // Acquire a handle for the local Windows login credentials of the
      // current user (see AcquireCredentialsHandle,
      // http://msdn.microsoft.com/en-us/library/windows/desktop/aa374712%28v=vs.85%29.aspx).
      // This corresponds to pg_SSPI_startup in libpq/fe-auth.c .
      try {
        clientCredentials = WindowsCredentialsHandleImpl.getCurrent(securityPackage);
        clientCredentials.initialize();
      } catch (Win32Exception e) {
        throw new PSQLException("Could not obtain local Windows credentials for SSPI",
            PSQLState.CONNECTION_UNABLE_TO_CONNECT /* TODO: Should be authentication error */, e);
      }

      try {
        targetName = makeSPN();
        LOGGER.log(Level.FINEST, "SSPI target name: {0}", targetName);

        sspiContext = new WindowsSecurityContextImpl();
        sspiContext.setPrincipalName(targetName);
        sspiContext.setCredentialsHandle(clientCredentials);
        sspiContext.setSecurityPackage(securityPackage);
        sspiContext.initialize(null, null, targetName);
      } catch (Win32Exception e) {
        throw new PSQLException("Could not initialize SSPI security context",
            PSQLState.CONNECTION_UNABLE_TO_CONNECT /* TODO: Should be auth error */, e);
      }

      sendSSPIResponse(sspiContext.getToken());
      LOGGER.log(Level.FINEST, "Sent first SSPI negotiation message");
    } catch (NoClassDefFoundError e) {
      throw new PSQLException(
          "SSPI cannot be used, Waffle or its dependencies are missing from the classpath",
          PSQLState.NOT_IMPLEMENTED, e);
    }
  }

  /**
   * Continue an existing authentication conversation with the back-end in response to an
   * authentication request of type AUTH_REQ_GSS_CONT.
   *
   * @param msgLength Length of message to read, excluding length word and message type word
   * @throws SQLException if something wrong happens
   * @throws IOException if something wrong happens
   */
  @Override
  public void continueSSPI(int msgLength) throws SQLException, IOException {
    if (sspiContext == null) {
      throw new IllegalStateException("Cannot continue SSPI authentication that we didn't begin");
    }

    LOGGER.log(Level.FINEST, "Continuing SSPI negotiation");

    // Feed the server's token back into the security context.
    byte[] receivedToken = pgStream.receive(msgLength);
    SecBufferDesc continueToken = new SecBufferDesc(Sspi.SECBUFFER_TOKEN, receivedToken);
    sspiContext.initialize(sspiContext.getHandle(), continueToken, targetName);

    // If negotiation is complete there may be zero bytes to send, in which
    // case we must not reply — the server is not expecting one; see
    // fe-auth.c in libpq for details.
    byte[] responseToken = sspiContext.getToken();
    if (responseToken.length > 0) {
      sendSSPIResponse(responseToken);
      LOGGER.log(Level.FINEST, "Sent SSPI negotiation continuation message");
    } else {
      LOGGER.log(Level.FINEST, "SSPI authentication complete, no reply required");
    }
  }

  /**
   * Send an SSPI token to the server wrapped in a 'password' message; the server knows we're doing
   * SSPI negotiation and will deal with it appropriately.
   *
   * @param outToken the raw SSPI token bytes
   * @throws IOException on network I/O issues
   */
  private void sendSSPIResponse(byte[] outToken) throws IOException {
    pgStream.sendChar('p');
    pgStream.sendInteger4(4 + outToken.length); // length word includes itself
    pgStream.send(outToken);
    pgStream.flush();
  }

  /**
   * Clean up native win32 resources after completion or failure of SSPI authentication. This
   * SSPIClient instance becomes unusable after disposal.
   */
  @Override
  public void dispose() {
    if (sspiContext != null) {
      sspiContext.dispose();
      sspiContext = null;
    }
    if (clientCredentials != null) {
      clientCredentials.dispose();
      clientCredentials = null;
    }
  }
}
/* Copyright (C) 2005-2011 Fabio Riccardi */

package com.lightcrafts.utils.cache;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

/**
 * A <code>Cache</code> is like a {@link Map} but with two differences:
 *  <ul>
 *    <li>More objects than can fit into memory can be cached.
 *    <li>Unlike a map, once you get an object, it's no longer in the cache.
 *  </ul>
 *
 * @author Paul J. Lucas [paul@lightcrafts.com]
 */
public final class Cache {

    ////////// public /////////////////////////////////////////////////////////

    /**
     * Construct a <code>Cache</code>.
     *
     * @param objBroker The {@link CacheObjectBroker} to use.
     * @param objMap The {@link CacheObjectMap} to use.
     * @param store The {@link CacheStore} to use.
     * @param freeBlockMgr The {@link FreeBlockManager} to use.
     */
    public Cache( CacheObjectBroker objBroker, CacheObjectMap objMap,
                  CacheStore store, FreeBlockManager freeBlockMgr ) {
        m_blockMap = new HashMap<Object,CacheBlock>();
        m_freeBlockMgr = freeBlockMgr;
        m_objBroker = objBroker;
        m_objMap = objMap;
        m_objMap.setCache( this );
        m_store = store;
    }

    /**
     * Removes all objects from the cache.
     *
     * @throws IOException if clearing the backing store fails.
     */
    public synchronized void clear() throws IOException {
        m_blockMap.clear();
        m_freeBlockMgr.clear();
        m_objMap.clear();
        m_store.clear();
    }

    /**
     * Checks whether the cache contains a particular object.
     *
     * @param key The object's key.
     * @return Returns <code>true</code> only if the object is in the cache.
     */
    public synchronized boolean contains( Object key ) {
        return m_objMap.contains( key );
    }

    /**
     * Disposes of this <code>Cache</code>.
     *
     * @throws IOException if disposing of the backing store fails.
     */
    public synchronized void dispose() throws IOException {
        m_objMap.dispose();
        m_store.dispose();
    }

    /**
     * Gets an object from the cache.  Once gotten, the object is removed.
     *
     * @param key The object's key.
     * @param aux An auxiliary object passed through to
     * {@link CacheObjectBroker#decodeFromByteBuffer(ByteBuffer,Object)}.
     * An implementation of {@link CacheObjectBroker} can use this object for
     * any purpose.
     * @return Returns the relevant object or <code>null</code> if there is no
     * object in the cache with the given key.
     */
    public Object getOnce( Object key, Object aux ) throws IOException {
        return m_objMap.getOnce( key, aux );
    }

    /**
     * Gets the {@link CacheObjectBroker} in use by the <code>Cache</code>.
     *
     * @return Returns said {@link CacheObjectBroker}.
     */
    public CacheObjectBroker getCacheObjectBroker() {
        return m_objBroker;
    }

    /**
     * Gets the {@link CacheObjectMap} in use by the <code>Cache</code>.
     *
     * @return Returns said {@link CacheObjectMap}.
     */
    public CacheObjectMap getCacheObjectMap() {
        return m_objMap;
    }

    /**
     * Gets the {@link CacheStore} in use.
     *
     * @return Returns said {@link CacheStore}.
     */
    public CacheStore getCacheStore() {
        return m_store;
    }

    /**
     * Gets the {@link FreeBlockManager} in use by the <code>Cache</code>.
     *
     * @return Returns said {@link FreeBlockManager}.
     */
    public FreeBlockManager getFreeBlockManager() {
        return m_freeBlockMgr;
    }

    /**
     * Puts an object into the cache.
     *
     * @param key The object's key.
     * @param obj The object to put.
     */
    public void put( Object key, Object obj ) throws IOException {
        m_objMap.put( key, obj );
    }

    /**
     * Removes an object from the cache.
     *
     * @param key The object's key.
     * @return Returns <code>true</code> only if the object was in the cache
     * and removed.
     */
    public boolean remove( Object key ) {
        return m_objMap.remove( key );
    }

    /**
     * Reads an object from the {@link CacheStore} being used.  This method is
     * meant to be called only by methods of classes that implement the
     * {@link CacheObjectMap} interface.
     *
     * @param key The object's key.
     * @param aux An auxiliary object passed through to
     * {@link CacheObjectBroker#decodeFromByteBuffer(ByteBuffer,Object)}.
     * An implementation of {@link CacheObjectBroker} can use this object for
     * any purpose.
     * @return Returns the read object, or <code>null</code> if the store has
     * no block for the given key.
     */
    public synchronized Object readFromStore( Object key, Object aux )
        throws IOException
    {
        final CacheBlock block = m_blockMap.remove( key );
        if ( block == null )
            return null;
        try {
            final ByteBuffer buf = m_store.getByteBuffer( block.getSize() );
            m_store.readFromStore( block.getPosition(), buf );
            return m_objBroker.decodeFromByteBuffer( buf, aux );
        }
        finally {
            // The block has already been removed from m_blockMap, so it must
            // be returned to the free list even if decoding throws; otherwise
            // its store space would be leaked.
            m_freeBlockMgr.freeBlock( block );
        }
    }

    /**
     * Removes an object from the {@link CacheStore} being used.  This method
     * is meant to be called only by methods of classes that implement the
     * {@link CacheObjectMap} interface.
     *
     * @param key The object's key.
     * @return Returns <code>true</code> only if the object was removed.
     */
    public synchronized boolean removeFromStore( Object key ) {
        final CacheBlock block = m_blockMap.remove( key );
        if ( block != null ) {
            m_freeBlockMgr.freeBlock( block );
            return true;
        }
        return false;
    }

    /**
     * Checks whether the {@link CacheStore} contains a particular object.
     *
     * @param key The object's key.
     * @return Returns <code>true</code> only if the {@link CacheStore}
     * contains an object having the given key.
     */
    public synchronized boolean storeContains( Object key ) {
        return m_blockMap.containsKey( key );
    }

    /**
     * Writes an object to the {@link CacheStore} being used.  This method is
     * meant to be called only by methods of classes that implement the
     * {@link CacheObjectMap} interface.
     *
     * @param key The object's key.
     * @param buf The encoded object to write.
     */
    public synchronized void writeToStore( Object key, ByteBuffer buf )
        throws IOException
    {
        final int objSize = buf.limit();
        CacheBlock block = m_freeBlockMgr.findBlockOfSize( objSize );
        if ( block == null ) {
            //
            // There are no free blocks available: create a new one at the end
            // of the store.
            //
            // The calls to getSize() and writeToStore() must be together in a
            // synchronized block so no other thread can extend the store
            // between computing the new block's position and writing it.
            //
            synchronized ( m_store ) {
                block = new CacheBlock( m_store.getSize(), objSize );
                m_store.writeToStore( block.getPosition(), buf );
            }
        } else
            m_store.writeToStore( block.getPosition(), buf );
        m_blockMap.put( key, block );
    }

    ////////// protected //////////////////////////////////////////////////////

    /**
     * Finalize a <code>Cache</code> by disposing of its resources.
     */
    protected void finalize() throws Throwable {
        // Dispose our own resources first; super.finalize() runs in a finally
        // block so the superclass finalizer is never skipped, even if
        // dispose() throws.
        try {
            dispose();
        }
        finally {
            super.finalize();
        }
    }

    ////////// private ////////////////////////////////////////////////////////

    /**
     * A mapping for those objects that have been cached.  The key is the
     * object's key and the value is the relevant {@link CacheBlock}.
     */
    private final Map<Object,CacheBlock> m_blockMap;

    /**
     * The {@link FreeBlockManager} to use.
     */
    private final FreeBlockManager m_freeBlockMgr;

    /**
     * The {@link CacheObjectBroker} to use.
     */
    private final CacheObjectBroker m_objBroker;

    /**
     * The {@link CacheObjectMap} to use.
     */
    private final CacheObjectMap m_objMap;

    /**
     * The {@link CacheStore} to use.
     */
    private final CacheStore m_store;
}
/* vim:set et sw=4 ts=4: */
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediapackagevod.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * A CMAF packaging configuration.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediapackage-vod-2018-11-07/CmafPackage" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CmafPackage implements Serializable, Cloneable, StructuredPojo {

    /** The CMAF encryption settings, if any. */
    private CmafEncryption encryption;
    /** A list of HLS manifest configurations. */
    private java.util.List<HlsManifest> hlsManifests;
    /**
     * Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source
     * fragment duration.
     */
    private Integer segmentDurationSeconds;

    /**
     * Sets the CMAF encryption configuration.
     *
     * @param encryption
     */
    public void setEncryption(CmafEncryption encryption) {
        this.encryption = encryption;
    }

    /**
     * Returns the CMAF encryption configuration.
     *
     * @return
     */
    public CmafEncryption getEncryption() {
        return this.encryption;
    }

    /**
     * Fluent setter for the CMAF encryption configuration.
     *
     * @param encryption
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CmafPackage withEncryption(CmafEncryption encryption) {
        setEncryption(encryption);
        return this;
    }

    /**
     * A list of HLS manifest configurations.
     *
     * @return A list of HLS manifest configurations.
     */
    public java.util.List<HlsManifest> getHlsManifests() {
        return hlsManifests;
    }

    /**
     * A list of HLS manifest configurations. A defensive copy of the given collection is stored; passing
     * <code>null</code> clears the list.
     *
     * @param hlsManifests
     *        A list of HLS manifest configurations.
     */
    public void setHlsManifests(java.util.Collection<HlsManifest> hlsManifests) {
        this.hlsManifests = (hlsManifests == null) ? null : new java.util.ArrayList<HlsManifest>(hlsManifests);
    }

    /**
     * A list of HLS manifest configurations.
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setHlsManifests(java.util.Collection)} or {@link #withHlsManifests(java.util.Collection)} if you want to
     * override the existing values.
     * </p>
     *
     * @param hlsManifests
     *        A list of HLS manifest configurations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CmafPackage withHlsManifests(HlsManifest... hlsManifests) {
        if (this.hlsManifests == null) {
            // Lazily create the backing list, presized for the incoming values.
            setHlsManifests(new java.util.ArrayList<HlsManifest>(hlsManifests.length));
        }
        java.util.Collections.addAll(this.hlsManifests, hlsManifests);
        return this;
    }

    /**
     * A list of HLS manifest configurations.
     *
     * @param hlsManifests
     *        A list of HLS manifest configurations.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CmafPackage withHlsManifests(java.util.Collection<HlsManifest> hlsManifests) {
        setHlsManifests(hlsManifests);
        return this;
    }

    /**
     * Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source
     * fragment duration.
     *
     * @param segmentDurationSeconds
     *        Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the
     *        source fragment duration.
     */
    public void setSegmentDurationSeconds(Integer segmentDurationSeconds) {
        this.segmentDurationSeconds = segmentDurationSeconds;
    }

    /**
     * Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source
     * fragment duration.
     *
     * @return Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the
     *         source fragment duration.
     */
    public Integer getSegmentDurationSeconds() {
        return this.segmentDurationSeconds;
    }

    /**
     * Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the source
     * fragment duration.
     *
     * @param segmentDurationSeconds
     *        Duration (in seconds) of each fragment. Actual fragments will be rounded to the nearest multiple of the
     *        source fragment duration.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CmafPackage withSegmentDurationSeconds(Integer segmentDurationSeconds) {
        setSegmentDurationSeconds(segmentDurationSeconds);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("{");
        if (getEncryption() != null) {
            sb.append("Encryption: ").append(getEncryption()).append(",");
        }
        if (getHlsManifests() != null) {
            sb.append("HlsManifests: ").append(getHlsManifests()).append(",");
        }
        if (getSegmentDurationSeconds() != null) {
            sb.append("SegmentDurationSeconds: ").append(getSegmentDurationSeconds());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // A null obj also fails this check, matching the usual equals() contract.
        if (!(obj instanceof CmafPackage)) {
            return false;
        }
        CmafPackage that = (CmafPackage) obj;
        return java.util.Objects.equals(getEncryption(), that.getEncryption())
                && java.util.Objects.equals(getHlsManifests(), that.getHlsManifests())
                && java.util.Objects.equals(getSegmentDurationSeconds(), that.getSegmentDurationSeconds());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces the same 31-based accumulation (seeded with 1,
        // null -> 0) as the hand-rolled prime loop it replaces.
        return java.util.Objects.hash(getEncryption(), getHlsManifests(), getSegmentDurationSeconds());
    }

    @Override
    public CmafPackage clone() {
        try {
            return (CmafPackage) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.mediapackagevod.model.transform.CmafPackageMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.test; import static org.apache.zookeeper.client.FourLetterWordMain.send4LetterWord; import java.io.File; import java.io.IOException; import java.lang.management.ManagementFactory; import java.lang.management.OperatingSystemMXBean; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import javax.management.MBeanServerConnection; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.PortAssignment; import org.apache.zookeeper.TestableZooKeeper; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher.Event.KeeperState; import org.apache.zookeeper.ZKTestCase; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.server.ServerCnxnFactory; import org.apache.zookeeper.server.ServerCnxnFactoryAccessor; import org.apache.zookeeper.server.ZKDatabase; import org.apache.zookeeper.server.ZooKeeperServer; import org.apache.zookeeper.server.persistence.FileTxnLog; 
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.sun.management.UnixOperatingSystemMXBean;

/**
 * Base class for ZooKeeper client-side tests: starts a standalone server in
 * setUp(), tracks the clients a test creates so they are closed in tearDown(),
 * and (on unix JVMs) logs open file-descriptor counts to help spot leaks.
 */
public abstract class ClientBase extends ZKTestCase {
    protected static final Logger LOG = LoggerFactory.getLogger(ClientBase.class);

    // Generous client connect timeout (ms); tests may run on slow/loaded hosts.
    public static int CONNECTION_TIMEOUT = 30000;
    static final File BASETEST =
        new File(System.getProperty("build.test.dir", "build"));

    protected String hostPort = "127.0.0.1:" + PortAssignment.unique();
    protected int maxCnxns = 0;
    protected ServerCnxnFactory serverFactory = null;
    protected File tmpDir = null;

    // fd count captured at setUp(), compared against in tearDown().
    long initialFdCount;

    public ClientBase() {
        super();
    }

    /**
     * In general don't use this. Only use in the special case that you
     * want to ignore results (for whatever reason) in your test. Don't
     * use empty watchers in real code!
     */
    protected class NullWatcher implements Watcher {
        public void process(WatchedEvent event) { /* nada */ }
    }

    /**
     * Watcher that tracks connect/disconnect state and lets a test block until
     * the session reaches the desired state.
     */
    protected static class CountdownWatcher implements Watcher {
        // XXX this doesn't need to be volatile! (Should probably be final)
        volatile CountDownLatch clientConnected;
        volatile boolean connected;

        public CountdownWatcher() {
            reset();
        }
        synchronized public void reset() {
            clientConnected = new CountDownLatch(1);
            connected = false;
        }
        synchronized public void process(WatchedEvent event) {
            if (event.getState() == KeeperState.SyncConnected ||
                event.getState() == KeeperState.ConnectedReadOnly) {
                connected = true;
                notifyAll();
                clientConnected.countDown();
            } else {
                connected = false;
                notifyAll();
            }
        }
        synchronized boolean isConnected() {
            return connected;
        }
        /** Blocks up to timeout ms for a connected event; throws on timeout. */
        synchronized void waitForConnected(long timeout) throws InterruptedException, TimeoutException {
            long expire = System.currentTimeMillis() + timeout;
            long left = timeout;
            while(!connected && left > 0) {
                wait(left);
                left = expire - System.currentTimeMillis();
            }
            if (!connected) {
                throw new TimeoutException("Did not connect");
            }
        }
        /** Blocks up to timeout ms for a disconnect event; throws on timeout. */
        synchronized void waitForDisconnected(long timeout) throws InterruptedException, TimeoutException {
            long expire = System.currentTimeMillis() + timeout;
            long left = timeout;
            while(connected && left > 0) {
                wait(left);
                left = expire - System.currentTimeMillis();
            }
            if (connected) {
                throw new TimeoutException("Did not disconnect");
            }
        }
    }

    protected TestableZooKeeper createClient()
        throws IOException, InterruptedException
    {
        return createClient(hostPort);
    }

    protected TestableZooKeeper createClient(String hp)
        throws IOException, InterruptedException
    {
        CountdownWatcher watcher = new CountdownWatcher();
        return createClient(watcher, hp);
    }

    // Clients created by the running test; closed en masse in tearDownAll().
    private LinkedList<ZooKeeper> allClients;
    private boolean allClientsSetup = false;

    protected TestableZooKeeper createClient(CountdownWatcher watcher, String hp)
        throws IOException, InterruptedException
    {
        return createClient(watcher, hp, CONNECTION_TIMEOUT);
    }

    /**
     * Creates a client, waits for it to connect, and registers it for cleanup.
     * Fails the test if the connection cannot be established within timeout ms.
     */
    protected TestableZooKeeper createClient(CountdownWatcher watcher,
            String hp, int timeout)
        throws IOException, InterruptedException
    {
        watcher.reset();
        TestableZooKeeper zk = new TestableZooKeeper(hp, timeout, watcher);
        if (!watcher.clientConnected.await(timeout, TimeUnit.MILLISECONDS))
        {
            Assert.fail("Unable to connect to server");
        }
        synchronized(this) {
            if (!allClientsSetup) {
                LOG.error("allClients never setup");
                Assert.fail("allClients never setup");
            }
            if (allClients != null) {
                allClients.add(zk);
            } else {
                // test done - close the zk, not needed
                zk.close();
            }
        }

        // Verify the session shows up in JMX before handing it to the test.
        JMXEnv.ensureAll("0x" + Long.toHexString(zk.getSessionId()));

        return zk;
    }

    /** Simple host/port pair parsed from a "host:port" string. */
    public static class HostPort {
        String host;
        int port;
        public HostPort(String host, int port) {
            this.host = host;
            this.port = port;
        }
    }

    /** Parses a comma-separated "host:port,host:port,..." list. */
    public static List<HostPort> parseHostPortList(String hplist) {
        ArrayList<HostPort> alist = new ArrayList<HostPort>();
        for (String hp: hplist.split(",")) {
            int idx = hp.lastIndexOf(':');
            String host = hp.substring(0, idx);
            int port;
            try {
                port = Integer.parseInt(hp.substring(idx + 1));
            } catch(RuntimeException e) {
                throw new RuntimeException("Problem parsing " + hp + e.toString());
            }
            alist.add(new HostPort(host,port));
        }
        return alist;
    }

    /**
     * Polls the server with the "stat" four-letter word until it answers as a
     * writable server, or the timeout elapses. Returns true if the server came up.
     */
    public static boolean waitForServerUp(String hp, long timeout) {
        long start = System.currentTimeMillis();
        while (true) {
            try {
                // if there are multiple hostports, just take the first one
                HostPort hpobj = parseHostPortList(hp).get(0);
                String result = send4LetterWord(hpobj.host, hpobj.port, "stat");
                if (result.startsWith("Zookeeper version:") &&
                        !result.contains("READ-ONLY")) {
                    return true;
                }
            } catch (IOException e) {
                // ignore as this is expected
                LOG.info("server " + hp + " not up " + e);
            }

            if (System.currentTimeMillis() > start + timeout) {
                break;
            }
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                // ignore
            }
        }
        return false;
    }

    /**
     * Polls the server until the "stat" command fails (server down), or the
     * timeout elapses. Returns true if the server went down.
     */
    public static boolean waitForServerDown(String hp, long timeout) {
        long start = System.currentTimeMillis();
        while (true) {
            try {
                HostPort hpobj = parseHostPortList(hp).get(0);
                send4LetterWord(hpobj.host, hpobj.port, "stat");
            } catch (IOException e) {
                return true;
            }

            if (System.currentTimeMillis() > start + timeout) {
                break;
            }
            try {
                Thread.sleep(250);
            } catch (InterruptedException e) {
                // ignore
            }
        }
        return false;
    }

    /** Joins the thread for up to millis ms; fails the test if it is still alive. */
    static void verifyThreadTerminated(Thread thread, long millis)
        throws InterruptedException
    {
        thread.join(millis);
        if (thread.isAlive()) {
            LOG.error("Thread " + thread.getName() + " : "
                    + Arrays.toString(thread.getStackTrace()));
            Assert.assertFalse("thread " + thread.getName()
                    + " still alive after join", true);
        }
    }

    public static File createTmpDir() throws IOException {
        return createTmpDir(BASETEST);
    }

    static File createTmpDir(File parentDir) throws IOException {
        File tmpFile = File.createTempFile("test", ".junit", parentDir);
        // don't delete tmpFile - this ensures we don't attempt to create
        // a tmpDir with a duplicate name
        File tmpDir = new File(tmpFile + ".dir");
        Assert.assertFalse(tmpDir.exists()); // never true if tmpfile does it's job
        Assert.assertTrue(tmpDir.mkdirs());
        return tmpDir;
    }

    /** Extracts the trailing port number from "host:port" (ignoring any "/chroot"). */
    private static int getPort(String hostPort) {
        String[] split = hostPort.split(":");
        String portstr = split[split.length-1];
        String[] pc = portstr.split("/");
        if (pc.length > 1) {
            portstr = pc[0];
        }
        return Integer.parseInt(portstr);
    }

    /**
     * Starts (or restarts, if factory is non-null) a standalone server on the
     * port from hostPort, and waits for it to come up.
     */
    public static ServerCnxnFactory createNewServerInstance(File dataDir,
            ServerCnxnFactory factory, String hostPort, int maxCnxns)
        throws IOException, InterruptedException
    {
        ZooKeeperServer zks = new ZooKeeperServer(dataDir, dataDir, 3000);
        final int PORT = getPort(hostPort);
        if (factory == null) {
            factory = ServerCnxnFactory.createFactory(PORT, maxCnxns);
        }
        factory.startup(zks);
        Assert.assertTrue("waiting for server up",
                   ClientBase.waitForServerUp("127.0.0.1:" + PORT,
                                              CONNECTION_TIMEOUT));

        return factory;
    }

    /** Shuts down the server, closes its database, and waits for the port to free up. */
    static void shutdownServerInstance(ServerCnxnFactory factory,
            String hostPort)
    {
        if (factory != null) {
            ZKDatabase zkDb;
            {
                ZooKeeperServer zs = getServer(factory);
                zkDb = zs.getZKDatabase();
            }
            factory.shutdown();
            try {
                zkDb.close();
            } catch (IOException ie) {
                LOG.warn("Error closing logs ", ie);
            }
            final int PORT = getPort(hostPort);

            Assert.assertTrue("waiting for server down",
                       ClientBase.waitForServerDown("127.0.0.1:" + PORT,
                                                    CONNECTION_TIMEOUT));
        }
    }

    /**
     * Test specific setup
     */
    public static void setupTestEnv() {
        // during the tests we run with 100K prealloc in the logs.
        // on windows systems prealloc of 64M was seen to take ~15seconds
        // resulting in test Assert.failure (client timeout on first session).
        // set env and directly in order to handle static init/gc issues
        System.setProperty("zookeeper.preAllocSize", "100");
        FileTxnLog.setPreallocSize(100 * 1024);
    }

    protected void setUpAll() throws Exception {
        allClients = new LinkedList<ZooKeeper>();
        allClientsSetup = true;
    }

    @Before
    public void setUp() throws Exception
    {
        /* some useful information - log the number of fds used before
         * and after a test is run. Helps to verify we are freeing resources
         * correctly. Unfortunately this only works on unix systems (the
         * only place sun has implemented as part of the mgmt bean api.
         */
        OperatingSystemMXBean osMbean =
            ManagementFactory.getOperatingSystemMXBean();
        if (osMbean != null && osMbean instanceof UnixOperatingSystemMXBean) {
            UnixOperatingSystemMXBean unixos =
                (UnixOperatingSystemMXBean)osMbean;
            initialFdCount = unixos.getOpenFileDescriptorCount();
            LOG.info("Initial fdcount is: "
                    + initialFdCount);
        }

        setupTestEnv();

        JMXEnv.setUp();

        setUpAll();

        tmpDir = createTmpDir(BASETEST);

        startServer();

        LOG.info("Client test setup finished");
    }

    protected void startServer() throws Exception {
        LOG.info("STARTING server");
        serverFactory = createNewServerInstance(tmpDir, serverFactory,
                hostPort, maxCnxns);
        // ensure that only server and data bean are registered
        JMXEnv.ensureOnly("InMemoryDataTree", "StandaloneServer_port");
    }

    protected void stopServer() throws Exception {
        LOG.info("STOPPING server");
        shutdownServerInstance(serverFactory, hostPort);
        serverFactory = null;
        // ensure no beans are leftover
        JMXEnv.ensureOnly();
    }


    protected static ZooKeeperServer getServer(ServerCnxnFactory fac) {
        ZooKeeperServer zs = ServerCnxnFactoryAccessor.getZkServer(fac);

        return zs;
    }

    /** Closes every client created by the test; interruption during close is logged, not fatal. */
    protected void tearDownAll() throws Exception {
        synchronized (this) {
            if (allClients != null) for (ZooKeeper zk : allClients) {
                try {
                    if (zk != null)
                        zk.close();
                } catch (InterruptedException e) {
                    LOG.warn("ignoring interrupt", e);
                }
            }
            allClients = null;
        }
    }

    @After
    public void tearDown() throws Exception {
        LOG.info("tearDown starting");

        tearDownAll();

        stopServer();

        if (tmpDir != null) {
            Assert.assertTrue("delete " + tmpDir.toString(),
                    recursiveDelete(tmpDir));
        }

        // This has to be set to null when the same instance of this class is reused between test cases
        serverFactory = null;

        JMXEnv.tearDown();

        /* some useful information - log the number of fds used before
         * and after a test is run. Helps to verify we are freeing resources
         * correctly. Unfortunately this only works on unix systems (the
         * only place sun has implemented as part of the mgmt bean api.
         */
        OperatingSystemMXBean osMbean =
            ManagementFactory.getOperatingSystemMXBean();
        if (osMbean != null && osMbean instanceof UnixOperatingSystemMXBean) {
            UnixOperatingSystemMXBean unixos =
                (UnixOperatingSystemMXBean)osMbean;
            long fdCount = unixos.getOpenFileDescriptorCount();
            String message = "fdcount after test is: "
                    + fdCount + " at start it was " + initialFdCount;
            LOG.info(message);
            if (fdCount > initialFdCount) {
                LOG.info("sleeping for 20 secs");
                //Thread.sleep(60000);
                //assertTrue(message, fdCount <= initialFdCount);
            }
        }
    }

    public static MBeanServerConnection jmxConn() throws IOException {
        return JMXEnv.conn();
    }

    /** Deletes a file or directory tree, asserting each child delete succeeds. */
    public static boolean recursiveDelete(File d) {
        if (d.isDirectory()) {
            File children[] = d.listFiles();
            for (File f : children) {
                Assert.assertTrue("delete " + f.toString(), recursiveDelete(f));
            }
        }
        return d.delete();
    }

    /** Dumps the stack of every live thread to the error log (diagnostic aid). */
    public static void logAllStackTraces() {
        StringBuilder sb = new StringBuilder();
        sb.append("Starting logAllStackTraces()\n");
        Map<Thread, StackTraceElement[]> threads =
            Thread.getAllStackTraces();
        for (Entry<Thread, StackTraceElement[]> e: threads.entrySet()) {
            sb.append("Thread " + e.getKey().getName() + "\n");
            for (StackTraceElement elem: e.getValue()) {
                sb.append("\tat " + elem + "\n");
            }
        }
        sb.append("Ending logAllStackTraces()\n");
        LOG.error(sb.toString());
    }

    /*
     * Verify that all of the servers see the same number of nodes
     * at the root
     */
    void verifyRootOfAllServersMatch(String hostPort)
        throws InterruptedException, KeeperException, IOException
    {
        String parts[] = hostPort.split(",");

        // run through till the counts no longer change on each server
        // NOTE(review): comment below is stale — the loop actually allows up
        // to 100 tries with 10 second sleeps, not 15 tries / 2 second sleeps.
        // max 15 tries, with 2 second sleeps, so approx 30 seconds
        int[] counts = new int[parts.length];
        int failed = 0;
        for (int j = 0; j < 100; j++) {
            int newcounts[] = new int[parts.length];
            int i = 0;
            for (String hp : parts) {
                try {
                    ZooKeeper zk = createClient(hp);

                    try {
                        newcounts[i++] = zk.getChildren("/", false).size();
                    } finally {
                        zk.close();
                    }
                } catch (Throwable t) {
                    failed++;
                    // if session creation Assert.fails dump the thread stack
                    // and try the next server
                    logAllStackTraces();
                }
            }

            if (Arrays.equals(newcounts, counts)) {
                LOG.info("Found match with array:"
                        + Arrays.toString(newcounts));
                counts = newcounts;
                break;
            } else {
                counts = newcounts;
                Thread.sleep(10000);
            }

            // don't keep this up too long, will Assert.assert false below
            if (failed > 10) {
                break;
            }
        }

        // verify all the servers reporting same number of nodes
        String logmsg = "node count not consistent{} {}";
        for (int i = 1; i < parts.length; i++) {
            if (counts[i-1] != counts[i]) {
                LOG.error(logmsg, Integer.valueOf(counts[i-1]), Integer.valueOf(counts[i]));
            } else {
                LOG.info(logmsg, Integer.valueOf(counts[i-1]), Integer.valueOf(counts[i]));
            }
        }
    }
}
package com.furnaghan.exif.parser;

import static com.furnaghan.exif.parser.ExifParser.EXIF_NAME;
import static com.furnaghan.exif.parser.ExifParser.IFD_TAGS;
import static com.furnaghan.exif.parser.ExifParser.TIFF_MARKER;
import static com.furnaghan.exif.parser.ExifParser.VERBOSE;

import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.furnaghan.exif.ExifTagReference;
import com.furnaghan.exif.ExifTags;
import com.furnaghan.exif.ExifTags;
import com.furnaghan.exif.ImageFileDirectory;
import com.furnaghan.exif.io.StreamWriter;
import com.furnaghan.exif.tag.Thumbnail;

/**
 * Serialises {@link ExifTags} into an EXIF payload: an "Exif\0\0"-style header
 * followed by a TIFF stream (byte-order mark, TIFF marker, then IFD0, any
 * sub-IFDs listed in {@code IFD_TAGS}, and an optional thumbnail IFD1).
 * All offsets written into IFD entries are pre-computed from entry counts
 * before the referenced blobs are actually emitted, so the entry-size
 * arithmetic here must stay in lock-step with what {@link #writeTag} emits.
 */
public class ExifWriter implements Closeable {

	private static final Logger LOG = LoggerFactory.getLogger( ExifWriter.class );

	// Underlying positioned writer; offsets below are relative to its mark().
	private final StreamWriter data;

	public ExifWriter( final OutputStream out ) {
		this.data = new StreamWriter( out, ByteOrder.BIG_ENDIAN, VERBOSE );
	}

	/**
	 * Writes the EXIF preamble and TIFF header, and marks the TIFF origin that
	 * all subsequent IFD offsets are measured from.
	 */
	private void writeHeader() throws IOException {
		LOG.info( "Writing header at {}", data.offset() );

		// Write the start of the exif data
		data.writeString( EXIF_NAME );
		data.writeShort( 0 );

		// Mark the start of the TIFF data
		data.mark();

		// Write the byte order for the remaining data
		data.setByteOrder( ByteOrder.LITTLE_ENDIAN );
		data.writeByteOrder( data.getByteOrder() );

		// Write TIFF marker
		data.writeShort( TIFF_MARKER );

		// Write the IFD0 offset in the TIFF header (IFD0 starts right after
		// this 4-byte offset field).
		final int exifOffset = data.offset() + 4;
		data.writeInt( exifOffset );
	}

	/**
	 * Writes one sub-IFD (e.g. Exif or GPS): entry count, the 12-byte entries,
	 * a zero "next IFD" pointer, then any out-of-line value blobs. Blob
	 * offsets are pre-computed from the fixed entry-table size.
	 */
	private void writeSubDirectory( final ImageFileDirectory ifd,
			final Map<ExifTagReference, Collection<Object>> tags ) throws IOException {
		LOG.info( "Writing {} IFD at {}", ifd, data.offset() );

		final int numTags = tags.size();
		data.writeShort( numTags );

		// First byte after the entry table (12 bytes/entry) and next-IFD pointer.
		int endOffset = data.offset() + ( numTags * 12 ) + 4;
		final List<byte[]> blobs = new LinkedList<>();

		LOG.info( "Writing {} entries at offset={} in IFD={}", numTags, data.offset(), ifd );
		for ( final Map.Entry<ExifTagReference, Collection<Object>> entry : tags.entrySet() ) {
			final byte[] blob = writeTag( entry.getKey(), entry.getValue(), endOffset );
			if ( blob != null ) {
				endOffset += blob.length;
				blobs.add( blob );
			}
		}

		// No further IFD follows this one.
		data.writeInt( 0 );

		for ( final byte[] bytes : blobs ) {
			data.writeBytes( bytes );
		}
	}

	/**
	 * Writes IFD1 (thumbnail directory): the stored Thumbnail-directory tags
	 * plus synthesised JPEGInterchangeFormat/Length entries pointing at the
	 * raw thumbnail bytes appended after the entry table.
	 */
	private void writeThumbnailIFD( final ExifTags exif ) throws IOException {
		LOG.info( "Writing thumbnail IFD at {}", data.offset() );

		final Collection<byte[]> thumbnails = exif.getThumbnails();
		final Map<ExifTagReference, Collection<Object>> tags = new HashMap<>(
				exif.getDirectory( ImageFileDirectory.Thumbnail ) );

		// Remove any thumbnail tags that made it in here...
		// (they are regenerated below from the actual thumbnail bytes)
		tags.remove( Thumbnail.JPEGInterchangeFormat.get() );
		tags.remove( Thumbnail.JPEGInterchangeFormatLength.get() );

		LOG.info( "Writing {} IFD at {}", ImageFileDirectory.Thumbnail, data.offset() );
		// +2 accounts for the two synthesised JPEGInterchangeFormat* entries.
		final int numTags = tags.size() + 2;
		data.writeShort( numTags );

		int endOffset = data.offset() + ( numTags * 12 ) + 4;
		final List<byte[]> blobs = new LinkedList<>();

		LOG.info( "Writing {} entries at offset={} in IFD={}", numTags, data.offset(),
				ImageFileDirectory.Thumbnail );
		for ( final Map.Entry<ExifTagReference, Collection<Object>> entry : tags.entrySet() ) {
			final byte[] blob = writeTag( entry.getKey(), entry.getValue(), endOffset );
			if ( blob != null ) {
				endOffset += blob.length;
				blobs.add( blob );
			}
		}

		// Create the thumbnail address and length tags and append the blobs.
		final Collection<Object> jpegInterchangeFormat = new ArrayList<>( thumbnails.size() );
		final Collection<Object> jpegInterchangeFormatLength = new ArrayList<>( thumbnails.size() );
		for ( final byte[] thumbnail : thumbnails ) {
			jpegInterchangeFormat.add( endOffset );
			jpegInterchangeFormatLength.add( thumbnail.length );
			endOffset += thumbnail.length;
			blobs.add( thumbnail );
		}
		writeTag( Thumbnail.JPEGInterchangeFormat.get(), jpegInterchangeFormat, endOffset );
		writeTag( Thumbnail.JPEGInterchangeFormatLength.get(), jpegInterchangeFormatLength, endOffset );

		// No IFD follows the thumbnail directory.
		data.writeInt( 0 );

		for ( final byte[] bytes : blobs ) {
			LOG.info( "Writing {} byte blob at {}", bytes.length, data.offset() );
			data.writeBytes( bytes );
		}
	}

	/**
	 * Writes a single 12-byte IFD entry. Values that encode to more than 4
	 * bytes are written out-of-line: the entry stores {@code blobOffset} and
	 * the encoded bytes are returned for the caller to append later; inline
	 * values return {@code null}.
	 */
	private byte[] writeTag( final ExifTagReference tag, final Collection<?> values,
			final int blobOffset ) throws IOException {
		final byte[] bytes = tag.getType().encode( values, data.getByteOrder() );

		LOG.info( "Writing entry: {} = {}", tag, values );
		data.writeShort( tag.getId() );
		data.writeShort( tag.getType().getId() );

		// Write the count. For ASCII this is the number of characters, for others this is
		// the number of "things" - almost always 1.
		data.writeInt( bytes.length / tag.getType().getSize() );

		if ( bytes.length > 4 ) {
			data.writeInt( blobOffset );
			LOG.info( "Writing sub-IFD for {} at {}", tag, blobOffset );
			return bytes;
		} else {
			// Inline value: pad the encoded bytes into the 4-byte value field.
			// NOTE(review): values are right-justified here for big-endian output;
			// TIFF 6.0 says short values are left-justified regardless of byte
			// order — confirm against StreamWriter/reader behaviour.
			final byte[] temp = new byte[4];
			if ( data.getByteOrder() == ByteOrder.BIG_ENDIAN ) {
				System.arraycopy( bytes, 0, temp, temp.length - bytes.length, bytes.length );
			} else {
				System.arraycopy( bytes, 0, temp, 0, bytes.length );
			}
			data.writeBytes( temp );
			return null;
		}
	}

	/**
	 * Writes the complete EXIF structure: header, IFD0 (including forward
	 * pointers to each sub-IFD in {@code IFD_TAGS}), the sub-IFDs themselves,
	 * and finally the thumbnail IFD when thumbnails are present.
	 */
	public void write( final ExifTags exif ) throws IOException {
		// Write segment header
		writeHeader();

		// Write image IFD0
		LOG.info( "Writing main image IFD at {}", data.offset() );
		final Map<ExifTagReference, Collection<Object>> image = exif.getDirectory(
				ImageFileDirectory.Image );
		final int numTags = image.size() + IFD_TAGS.size();
		data.writeShort( numTags );

		int endOffset = data.offset() + ( numTags * 12 ) + 4;
		final List<byte[]> blobs = new LinkedList<>();

		LOG.info( "Writing {} entries at offset={} in IFD={}", numTags, data.offset(),
				ImageFileDirectory.Image );
		for ( final Map.Entry<ExifTagReference, Collection<Object>> entry : image.entrySet() ) {
			final byte[] blob = writeTag( entry.getKey(), entry.getValue(), endOffset );
			if ( blob != null ) {
				endOffset += blob.length;
				blobs.add( blob );
			}
		}

		for ( final Map.Entry<ExifTagReference, ImageFileDirectory> ifd : IFD_TAGS.entrySet() ) {
			// Write a future reference to the IFD we will write later
			writeTag( ifd.getKey(), Collections.<Object>singleton( endOffset ), endOffset );

			// Figure out the size this IFD will take up so we know when the next will start
			// (2-byte count + 4-byte next pointer, plus 12 bytes per entry and any
			// out-of-line value longer than 4 bytes).
			endOffset += ( 2 + 4 );
			for ( final Map.Entry<ExifTagReference, Collection<Object>> entry : exif.getDirectory(
					ifd.getValue() ).entrySet() ) {
				final int length = entry.getKey().getType().getSize( entry.getValue() );
				endOffset += ( 12 + ( length > 4 ? length : 0 ) );
			}
		}

		// Next-IFD pointer: the thumbnail IFD's offset, or 0 when absent.
		data.writeInt( exif.hasThumbnails() ? endOffset : 0 );

		for ( final byte[] bytes : blobs ) {
			LOG.info( "Writing {} byte blob at {}", bytes.length, data.offset() );
			data.writeBytes( bytes );
		}

		// Write additional IFDs (GPS etc)
		for ( final Map.Entry<ExifTagReference, ImageFileDirectory> ifd : IFD_TAGS.entrySet() ) {
			writeSubDirectory( ifd.getValue(), exif.getDirectory( ifd.getValue() ) );
		}

		// Write thumbnail IFD1
		if ( exif.hasThumbnails() ) {
			writeThumbnailIFD( exif );
		}
	}

	@Override
	public void close() throws IOException {
		data.close();
	}
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package project;

import java.util.ArrayList;
import java.util.List;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;

/**
 * Tests for {@link SubjectManagerImpl}. Each test creates its own fixtures and
 * deletes them afterwards so the tests can run in any order against a shared
 * backing store.
 *
 * @author Ondra
 */
public class SubjectManagerImplTest {

    private SubjectManagerImpl manager = new SubjectManagerImpl();

    public SubjectManagerImplTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Test of createSubject method, of class SubjectManagerImpl.
     */
    @Test
    public void testCreateSubject() {
        System.out.println("createSubject");
        Subject subject = newSubject("Test Subject");
        manager.createSubject(subject);
        // A freshly created subject must have an id assigned by the manager.
        Long subjectID = subject.getId();
        assertNotNull(subjectID);
        manager.deleteSubject(subjectID);
    }

    /**
     * Test of updateSubject method, of class SubjectManagerImpl.
     */
    @Test
    public void testUpdateSubject() {
        System.out.println("updateSubject");
        Subject subject1 = newSubject("Test Subject 1");
        Subject subject2 = newSubject("Test Subject 2");
        manager.createSubject(subject1);
        manager.createSubject(subject2);
        Long subjectId = subject1.getId();

        subject1.setName("Bank");
        manager.updateSubject(subject1);
        subject1 = manager.getSubjectById(subjectId);
        assertEquals("Bank", subject1.getName());

        // Updating subject1 must not have touched the other stored subject.
        assertDeepEquals(subject2, manager.getSubjectById(subject2.getId()));

        manager.deleteSubject(subjectId);
        manager.deleteSubject(subject2.getId());
    }

    /**
     * Test of deleteSubject method, of class SubjectManagerImpl.
     */
    @Test
    public void testDeleteSubject() {
        System.out.println("deleteSubject");
        Subject subject1 = newSubject("Test Subject 1");
        Subject subject2 = newSubject("Test Subject 2");
        Subject subject3 = newSubject("Test Subject 3");
        manager.createSubject(subject1);
        manager.createSubject(subject2);
        manager.createSubject(subject3);
        Long subjectId = subject1.getId();

        manager.deleteSubject(subjectId);

        List<Subject> result = manager.findAllSubjects();
        // Two subjects must remain ...
        assertEquals(2, result.size());
        // ... and the deleted one must not be among them. (The original test
        // only compared sizes, which would pass even if the wrong row had been
        // deleted.)
        for (Subject remaining : result) {
            assertFalse("deleted subject still present",
                    subjectId.equals(remaining.getId()));
        }

        manager.deleteSubject(subject2.getId());
        manager.deleteSubject(subject3.getId());
    }

    /**
     * Test of getSubjectById method, of class SubjectManagerImpl.
     */
    @Test
    public void testGetSubjectById() {
        System.out.println("getSubjectById");
        Subject subject = newSubject("Test Subject");
        manager.createSubject(subject);
        // Look the subject up by the id the manager actually assigned instead
        // of hard-coding id 1 (the original used the deprecated `new Long(1)`,
        // which breaks whenever the backing id sequence is not fresh).
        Long id = subject.getId();

        Subject subjectById = manager.getSubjectById(id);
        assertNotNull(subjectById);
        assertEquals(subject.getId(), subjectById.getId());
        assertEquals(subject.getName(), subjectById.getName());

        manager.deleteSubject(subject.getId());
    }

    /**
     * Test of getSubjectByName method, of class SubjectManagerImpl.
     */
    @Test
    public void testGetSubjectByName() {
        System.out.println("getSubjectByName");
        String name1 = "Test 1";
        String name2 = "Test 2";
        Subject subject1 = newSubject("Test 1");
        Subject subject2 = newSubject("Test 2");
        Subject subject3 = newSubject("Test 3");
        Subject subject4 = newSubject("Test 4");
        manager.createSubject(subject1);
        manager.createSubject(subject2);
        manager.createSubject(subject3);
        manager.createSubject(subject4);

        // (The original pre-assigned `new Subject()` to both results; those
        // were dead stores and have been removed.)
        Subject result1 = manager.getSubjectByName(name1);
        Subject result2 = manager.getSubjectByName(name2);
        assertEquals(subject1, result1);
        assertEquals(subject2, result2);

        manager.deleteSubject(subject1.getId());
        manager.deleteSubject(subject2.getId());
        manager.deleteSubject(subject3.getId());
        manager.deleteSubject(subject4.getId());
    }

    /**
     * Test of findAllSubjects method, of class SubjectManagerImpl.
     */
    @Test
    public void testFindAllSubjects() {
        System.out.println("findAllSubjects");
        Subject subject1 = newSubject("Test 1");
        Subject subject2 = newSubject("Test 2");
        Subject subject3 = newSubject("Test 3");
        Subject subject4 = newSubject("Test 4");
        manager.createSubject(subject1);
        manager.createSubject(subject2);
        manager.createSubject(subject3);
        manager.createSubject(subject4);

        List<Subject> expResult = new ArrayList<>();
        expResult.add(subject1);
        expResult.add(subject2);
        expResult.add(subject3);
        expResult.add(subject4);
        List<Subject> result = manager.findAllSubjects();

        assertEquals(expResult.size(), result.size());
        assertDeepEquals(expResult, result);

        manager.deleteSubject(subject1.getId());
        manager.deleteSubject(subject2.getId());
        manager.deleteSubject(subject3.getId());
        manager.deleteSubject(subject4.getId());
    }

    /** Builds an unsaved subject with the given name. */
    private static Subject newSubject(String name) {
        Subject subject = new Subject();
        subject.setName(name);
        return subject;
    }

    /** Field-by-field comparison of two subjects. */
    private void assertDeepEquals(Subject expected, Subject actual) {
        assertEquals(expected.getId(), actual.getId());
        assertEquals(expected.getName(), actual.getName());
    }

    /** Pairwise deep comparison; also fails fast on a size mismatch. */
    private void assertDeepEquals(List<Subject> expectedList, List<Subject> actualList) {
        assertEquals("list sizes differ", expectedList.size(), actualList.size());
        for (int i = 0; i < expectedList.size(); i++) {
            assertDeepEquals(expectedList.get(i), actualList.get(i));
        }
    }
}
/* * Copyright 2013-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.event.listener; import com.facebook.buck.artifact_cache.ArtifactCacheEvent; import com.facebook.buck.distributed.DistBuildStatusEvent; import com.facebook.buck.distributed.thrift.BuildSlaveStatus; import com.facebook.buck.distributed.thrift.RunId; import com.facebook.buck.event.ActionGraphEvent; import com.facebook.buck.event.ArtifactCompressionEvent; import com.facebook.buck.event.ConsoleEvent; import com.facebook.buck.event.DaemonEvent; import com.facebook.buck.event.LeafEvent; import com.facebook.buck.event.LeafEvents; import com.facebook.buck.event.ParsingEvent; import com.facebook.buck.event.RuleKeyCalculationEvent; import com.facebook.buck.event.WatchmanStatusEvent; import com.facebook.buck.httpserver.WebServer; import com.facebook.buck.log.Logger; import com.facebook.buck.model.Pair; import com.facebook.buck.rules.BuildRuleEvent; import com.facebook.buck.rules.TestRunEvent; import com.facebook.buck.rules.TestStatusMessageEvent; import com.facebook.buck.rules.TestSummaryEvent; import com.facebook.buck.step.StepEvent; import com.facebook.buck.test.TestResultSummary; import com.facebook.buck.test.TestResultSummaryVerbosity; import com.facebook.buck.test.TestResults; import com.facebook.buck.test.TestStatusMessage; import com.facebook.buck.test.result.type.ResultType; import com.facebook.buck.timing.Clock; import com.facebook.buck.util.Console; import 
com.facebook.buck.util.MoreIterables; import com.facebook.buck.util.autosparse.AutoSparseStateEvents; import com.facebook.buck.util.environment.ExecutionEnvironment; import com.facebook.buck.util.unit.SizeUnit; import com.facebook.buck.util.versioncontrol.SparseSummary; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.common.eventbus.Subscribe; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Path; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.logging.Level; import javax.annotation.concurrent.GuardedBy; /** Console that provides rich, updating ansi output about the current build. */ public class SuperConsoleEventBusListener extends AbstractConsoleEventBusListener { /** * Maximum expected rendered line length so we can start with a decent size of line rendering * buffer. 
*/
  // Initial capacity for per-line StringBuilders used while rendering.
  private static final int EXPECTED_MAXIMUM_RENDERED_LINE_LENGTH = 128;

  private static final Logger LOG = Logger.get(SuperConsoleEventBusListener.class);

  @VisibleForTesting static final String EMOJI_BUNNY = "\uD83D\uDC07";
  @VisibleForTesting static final String EMOJI_DESERT = "\uD83C\uDFDD";
  @VisibleForTesting static final String EMOJI_ROLODEX = "\uD83D\uDCC7";

  private final Locale locale;
  private final Function<Long, String> formatTimeFunction;
  private final Optional<WebServer> webServer;
  // Per-thread "currently running" state, keyed by thread id; populated by the
  // various @Subscribe handlers and consumed by the thread-state renderers.
  private final ConcurrentMap<Long, Optional<? extends TestSummaryEvent>>
      threadsToRunningTestSummaryEvent;
  private final ConcurrentMap<Long, Optional<? extends TestStatusMessageEvent>>
      threadsToRunningTestStatusMessageEvent;
  private final ConcurrentMap<Long, Optional<? extends LeafEvent>> threadsToRunningStep;

  // Pending console log events; drained once per frame by createLogRenderLines().
  private final ConcurrentLinkedQueue<ConsoleEvent> logEvents;

  private final ScheduledExecutorService renderScheduler;

  private final TestResultFormatter testFormatter;
  // Running tallies for the test-status suffix line.
  private final AtomicInteger numPassingTests = new AtomicInteger(0);
  private final AtomicInteger numFailingTests = new AtomicInteger(0);
  private final AtomicInteger numExcludedTests = new AtomicInteger(0);
  private final AtomicInteger numDisabledTests = new AtomicInteger(0);
  private final AtomicInteger numAssumptionViolationTests = new AtomicInteger(0);
  private final AtomicInteger numDryRunTests = new AtomicInteger(0);

  private final AtomicReference<TestRunEvent.Started> testRunStarted;
  private final AtomicReference<TestRunEvent.Finished> testRunFinished;

  private final ImmutableList.Builder<String> testReportBuilder = ImmutableList.builder();
  private final ImmutableList.Builder<TestStatusMessage> testStatusMessageBuilder =
      ImmutableList.builder();

  // Once set, the per-thread line limit is tightened (see createRenderLinesAtTime).
  private final AtomicBoolean anyWarningsPrinted = new AtomicBoolean(false);
  private final AtomicBoolean anyErrorsPrinted = new AtomicBoolean(false);

  private final int defaultThreadLineLimit;
  private final int threadLineLimitOnWarning;
  private final int threadLineLimitOnError;
  private final boolean shouldAlwaysSortThreadsByTime;
  private final long buildRuleMinimumDurationMillis;

  private final DateFormat dateFormat;
  // Number of lines printed by the previous frame; used to clear them.
  private int lastNumLinesPrinted;
  private Optional<String> parsingStatus = Optional.empty();
  private Optional<SparseSummary> autoSparseSummary = Optional.empty();
  // Save if Watchman reported zero file changes in case we receive an ActionGraphCache hit. This
  // way the user can know that their changes, if they made any, were not picked up from Watchman.
  private boolean isZeroFileChanges = false;

  private final Object distBuildSlaveTrackerLock = new Object();

  // Thresholds below which the corresponding line is suppressed.
  private long minimumDurationMillisecondsToShowParse;
  private long minimumDurationMillisecondsToShowActionGraph;
  private long minimumDurationMillisecondsToShowWatchman;
  private boolean hideEmptyDownload;

  @GuardedBy("distBuildSlaveTrackerLock")
  private final Map<RunId, BuildSlaveStatus> distBuildSlaveTracker;

  private final Set<String> actionGraphCacheMessage = new HashSet<>();

  /** Production constructor: delegates with default display thresholds. */
  public SuperConsoleEventBusListener(
      SuperConsoleConfig config,
      Console console,
      Clock clock,
      TestResultSummaryVerbosity summaryVerbosity,
      ExecutionEnvironment executionEnvironment,
      Optional<WebServer> webServer,
      Locale locale,
      Path testLogPath,
      TimeZone timeZone) {
    this(
        config,
        console,
        clock,
        summaryVerbosity,
        executionEnvironment,
        webServer,
        locale,
        testLogPath,
        timeZone,
        500L,
        500L,
        1000L,
        true);
  }

  /** Full constructor with explicit display thresholds, exposed for tests. */
  @VisibleForTesting
  public SuperConsoleEventBusListener(
      SuperConsoleConfig config,
      Console console,
      Clock clock,
      TestResultSummaryVerbosity summaryVerbosity,
      ExecutionEnvironment executionEnvironment,
      Optional<WebServer> webServer,
      Locale locale,
      Path testLogPath,
      TimeZone timeZone,
      long minimumDurationMillisecondsToShowParse,
      long minimumDurationMillisecondsToShowActionGraph,
      long minimumDurationMillisecondsToShowWatchman,
      boolean hideEmptyDownload) {
    super(console, clock, locale, executionEnvironment, false);
    this.locale = locale;
    this.formatTimeFunction = this::formatElapsedTime;
    this.webServer = webServer;
    this.threadsToRunningTestSummaryEvent =
        new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
    this.threadsToRunningTestStatusMessageEvent =
        new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
    this.threadsToRunningStep = new ConcurrentHashMap<>(executionEnvironment.getAvailableCores());
    this.logEvents = new ConcurrentLinkedQueue<>();
    this.renderScheduler =
        Executors.newScheduledThreadPool(
            1, new ThreadFactoryBuilder().setNameFormat(getClass().getSimpleName() + "-%d").build());
    this.testFormatter =
        new TestResultFormatter(
            console.getAnsi(),
            console.getVerbosity(),
            summaryVerbosity,
            locale,
            Optional.of(testLogPath));
    this.testRunStarted = new AtomicReference<>();
    this.testRunFinished = new AtomicReference<>();
    this.defaultThreadLineLimit = config.getThreadLineLimit();
    this.threadLineLimitOnWarning = config.getThreadLineLimitOnWarning();
    this.threadLineLimitOnError = config.getThreadLineLimitOnError();
    this.shouldAlwaysSortThreadsByTime = config.shouldAlwaysSortThreadsByTime();
    this.buildRuleMinimumDurationMillis = config.getBuildRuleMinimumDurationMillis();
    this.minimumDurationMillisecondsToShowParse = minimumDurationMillisecondsToShowParse;
    this.minimumDurationMillisecondsToShowActionGraph =
        minimumDurationMillisecondsToShowActionGraph;
    this.minimumDurationMillisecondsToShowWatchman = minimumDurationMillisecondsToShowWatchman;
    this.hideEmptyDownload = hideEmptyDownload;
    this.dateFormat = new SimpleDateFormat("[yyyy-MM-dd HH:mm:ss.SSS]", this.locale);
    this.dateFormat.setTimeZone(timeZone);

    // Using LinkedHashMap because we want a predictable iteration order.
    this.distBuildSlaveTracker = new LinkedHashMap<>();
  }

  /** Schedules a runnable that updates the console output at a fixed interval.
*/
  public void startRenderScheduler(long renderInterval, TimeUnit timeUnit) {
    LOG.debug("Starting render scheduler (interval %d ms)", timeUnit.toMillis(renderInterval));
    renderScheduler.scheduleAtFixedRate(
        () -> {
          try {
            SuperConsoleEventBusListener.this.render();
          } catch (Error | RuntimeException e) {
            // Rethrow after logging: an escaped throwable would silently cancel
            // the scheduled task.
            LOG.error(e, "Rendering exception");
            throw e;
          }
        }, /* initialDelay */ renderInterval, /* period */ renderInterval, timeUnit);
  }

  /** Shuts down the thread pool and cancels the fixed interval runnable. */
  private synchronized void stopRenderScheduler() {
    LOG.debug("Stopping render scheduler");
    renderScheduler.shutdownNow();
  }

  /** Renders one frame: clears the previous frame, then prints the new one to stderr. */
  @VisibleForTesting
  synchronized void render() {
    LOG.verbose("Rendering");
    String lastRenderClear = clearLastRender();
    ImmutableList<String> lines = createRenderLinesAtTime(clock.currentTimeMillis());
    ImmutableList<String> logLines = createLogRenderLines();
    lastNumLinesPrinted = lines.size();

    // Synchronize on the DirtyPrintStreamDecorator to prevent interlacing of output.
    // We don't log immediately so we avoid locking the console handler to avoid deadlocks.
    boolean stdoutDirty;
    boolean stderrDirty;
    synchronized (console.getStdOut()) {
      synchronized (console.getStdErr()) {
        // If another source has written to stderr or stdout, stop rendering with the SuperConsole.
        // We need to do this to keep our updates consistent.
        stdoutDirty = console.getStdOut().isDirty();
        stderrDirty = console.getStdErr().isDirty();
        if (stdoutDirty || stderrDirty) {
          stopRenderScheduler();
        } else if (!lastRenderClear.isEmpty() || !lines.isEmpty() || !logLines.isEmpty()) {
          Iterable<String> renderedLines =
              Iterables.concat(
                  MoreIterables.zipAndConcat(logLines, Iterables.cycle("\n")),
                  ansi.asNoWrap(MoreIterables.zipAndConcat(lines, Iterables.cycle("\n"))));
          StringBuilder fullFrame = new StringBuilder(lastRenderClear);
          for (String part : renderedLines) {
            fullFrame.append(part);
          }
          console.getStdErr().getRawStream().print(fullFrame);
        }
      }
    }
    if (stdoutDirty || stderrDirty) {
      LOG.debug(
          "Stopping console output (stdout dirty %s, stderr dirty %s).", stdoutDirty, stderrDirty);
    }
  }

  /**
   * Creates a list of lines to be rendered at a given time.
   *
   * @param currentTimeMillis The time in ms to use when computing elapsed times.
   */
  @VisibleForTesting
  ImmutableList<String> createRenderLinesAtTime(long currentTimeMillis) {
    ImmutableList.Builder<String> lines = ImmutableList.builder();
    logEventPair(
        "Processing filesystem changes",
        Optional.empty(),
        currentTimeMillis,
        /* offsetMs */ 0L,
        watchmanStarted,
        watchmanFinished,
        Optional.empty(),
        Optional.of(this.minimumDurationMillisecondsToShowWatchman),
        lines);
    logEventPair(
        "Parsing buck files",
        /* suffix */ Optional.empty(),
        currentTimeMillis,
        /* offsetMs */ 0L,
        buckFilesParsingEvents.values(),
        getEstimatedProgressOfProcessingBuckFiles(),
        Optional.of(this.minimumDurationMillisecondsToShowParse),
        lines);
    long parseTime =
        logEventPair(
            "Creating action graph",
            /* suffix */ Optional.empty(),
            currentTimeMillis,
            /* offsetMs */ 0L,
            actionGraphEvents.values(),
            getEstimatedProgressOfProcessingBuckFiles(),
            Optional.of(this.minimumDurationMillisecondsToShowActionGraph),
            lines);
    logEventPair(
        "Generating project",
        Optional.empty(),
        currentTimeMillis,
        /* offsetMs */ 0L,
        projectGenerationStarted,
        projectGenerationFinished,
        getEstimatedProgressOfGeneratingProjectFiles(),
        Optional.empty(),
        lines);
    logEventPair(
        "Refreshing sparse checkout",
        createAutoSparseStatusMessage(autoSparseSummary),
        currentTimeMillis,
        /* offsetMs */ 0L,
        autoSparseState.values(),
        /* progress*/ Optional.empty(),
        Optional.empty(),
        lines);

    // If parsing has not finished, then there is no build rule information to print yet.
    if (buildStarted == null || parseTime == UNFINISHED_EVENT_PAIR) {
      return lines.build();
    }

    // Tighten the per-thread line budget once warnings/errors have appeared.
    int maxThreadLines = defaultThreadLineLimit;
    if (anyWarningsPrinted.get() && threadLineLimitOnWarning < maxThreadLines) {
      maxThreadLines = threadLineLimitOnWarning;
    }
    if (anyErrorsPrinted.get() && threadLineLimitOnError < maxThreadLines) {
      maxThreadLines = threadLineLimitOnError;
    }

    if (distBuildStarted != null) {
      long distBuildMs =
          logEventPair(
              "DISTBUILD",
              getOptionalDistBuildLineSuffix(),
              currentTimeMillis,
              0,
              this.distBuildStarted,
              this.distBuildFinished,
              getApproximateDistBuildProgress(),
              Optional.empty(),
              lines);

      if (distBuildMs == UNFINISHED_EVENT_PAIR) {
        MultiStateRenderer renderer;
        synchronized (distBuildSlaveTrackerLock) {
          renderer =
              new DistBuildSlaveStateRenderer(
                  ansi, currentTimeMillis, ImmutableList.copyOf(distBuildSlaveTracker.values()));
        }
        renderLines(renderer, lines, maxThreadLines, shouldAlwaysSortThreadsByTime);

        // We don't want to print anything else while dist-build is going on.
        return lines.build();
      }
    }

    // TODO(shivanker): Add a similar source file upload line for distributed build.
    Pair<Long, SizeUnit> bytesDownloaded = networkStatsKeeper.getBytesDownloaded();
    if (bytesDownloaded.getFirst() > 0 || !this.hideEmptyDownload) {
      lines.add(getNetworkStatsLine(buildFinished));
    }

    // Check to see if the build encompasses the time spent parsing. This is true for runs of
    // buck build but not so for runs of e.g. buck project. If so, subtract parse times
    // from the build time.
    long buildStartedTime = buildStarted.getTimestamp();
    long buildFinishedTime =
        buildFinished != null ? buildFinished.getTimestamp() : currentTimeMillis;
    Collection<EventPair> parsingEvents =
        getEventsBetween(buildStartedTime, buildFinishedTime, buckFilesParsingEvents.values());
    Collection<EventPair> processingEvents =
        getEventsBetween(buildStartedTime, buildFinishedTime, actionGraphEvents.values());
    long offsetMs =
        getTotalCompletedTimeFromEventPairs(parsingEvents)
            + getTotalCompletedTimeFromEventPairs(processingEvents);

    long totalBuildMs =
        logEventPair(
            "Building",
            getOptionalBuildLineSuffix(),
            currentTimeMillis,
            offsetMs, // parseTime,
            this.buildStarted,
            this.buildFinished,
            getApproximateBuildProgress(),
            Optional.empty(),
            lines);

    // If the Daemon is running and serving web traffic, print the URL to the Chrome Trace.
    getBuildTraceURLLine(lines);
    getBuildTimeLine(lines);

    if (totalBuildMs == UNFINISHED_EVENT_PAIR) {
      MultiStateRenderer renderer =
          new BuildThreadStateRenderer(
              ansi,
              formatTimeFunction,
              currentTimeMillis,
              buildRuleMinimumDurationMillis,
              threadsToRunningStep,
              buildRuleThreadTracker);
      renderLines(renderer, lines, maxThreadLines, shouldAlwaysSortThreadsByTime);
    }

    long testRunTime =
        logEventPair(
            "Testing",
            renderTestSuffix(),
            currentTimeMillis,
            0, /* offsetMs */
            testRunStarted.get(),
            testRunFinished.get(),
            Optional.empty(),
            Optional.empty(),
            lines);

    if (testRunTime == UNFINISHED_EVENT_PAIR) {
      MultiStateRenderer renderer =
          new TestThreadStateRenderer(
              ansi,
              formatTimeFunction,
              currentTimeMillis,
              threadsToRunningTestSummaryEvent,
              threadsToRunningTestStatusMessageEvent,
              threadsToRunningStep,
              buildRuleThreadTracker);
      renderLines(renderer, lines, maxThreadLines, shouldAlwaysSortThreadsByTime);
    }

    logEventPair(
        "Installing",
        /* suffix */ Optional.empty(),
        currentTimeMillis,
        0L,
        installStarted,
        installFinished,
        Optional.empty(),
        Optional.empty(),
        lines);

    logHttpCacheUploads(lines);
    return lines.build();
  }

  /** Logs (at debug) the local trace URL once the build has finished. */
  @SuppressWarnings("unused")
  private void getBuildTraceURLLine(ImmutableList.Builder<String> lines) {
    if (buildFinished != null && webServer.isPresent()) {
      Optional<Integer> port = webServer.get().getPort();
      if (port.isPresent()) {
        LOG.debug(
            "Build logs: http://localhost:%s/trace/%s", port.get(), buildFinished.getBuildId());
      }
    }
  }

  /** Appends a total-time line once both build start and finish events exist. */
  private void getBuildTimeLine(ImmutableList.Builder<String> lines) {
    // FIX: use short-circuit && — the original used the non-short-circuit
    // boolean `&` here, which is non-idiomatic and evaluates both operands
    // unconditionally.
    if (buildStarted != null && buildFinished != null) {
      long durationMs = buildFinished.getTimestamp() - buildStarted.getTimestamp();
      lines.add(" Total time: " + formatElapsedTime(durationMs));
    }
  }

  /** Builds the "(STATUS: …, …)" suffix summarising distributed-build slave state. */
  private Optional<String> getOptionalDistBuildLineSuffix() {
    String parseLine;
    List<String> columns = new ArrayList<>();

    synchronized (distBuildStatusLock) {
      if (!distBuildStatus.isPresent()) {
        columns.add("STATUS: INIT");
      } else {
        columns.add("STATUS: " + distBuildStatus.get().getStatus());
        int totalUploadErrorsCount = 0;
        int totalFilesMaterialized = 0;
        ImmutableList.Builder<CacheRateStatsKeeper.CacheRateStatsUpdateEvent> slaveCacheStats =
            new ImmutableList.Builder<>();

        for (BuildSlaveStatus slaveStatus : distBuildStatus.get().getSlaveStatuses()) {
          totalUploadErrorsCount += slaveStatus.getHttpArtifactUploadsFailureCount();
          totalFilesMaterialized += slaveStatus.getFilesMaterializedCount();

          if (slaveStatus.isSetCacheRateStats()) {
            slaveCacheStats.add(
                CacheRateStatsKeeper.getCacheRateStatsUpdateEventFromSerializedStats(
                    slaveStatus.getCacheRateStats()));
          }
        }

        CacheRateStatsKeeper.CacheRateStatsUpdateEvent aggregatedCacheStats =
            CacheRateStatsKeeper.getAggregatedCacheRateStats(slaveCacheStats.build());

        if (aggregatedCacheStats.getTotalRulesCount() != 0) {
          columns.add(
              String.format(
                  "%d [%.1f%%] CACHE MISS",
                  aggregatedCacheStats.getCacheMissCount(),
                  aggregatedCacheStats.getCacheMissRate()));

          if (aggregatedCacheStats.getCacheErrorCount() != 0) {
            columns.add(
                String.format(
                    "%d [%.1f%%] CACHE ERRORS",
                    aggregatedCacheStats.getCacheErrorCount(),
                    aggregatedCacheStats.getCacheErrorRate()));
          }
        }

        if (totalUploadErrorsCount > 0) {
          columns.add(String.format("%d UPLOAD ERRORS", totalUploadErrorsCount));
        }

        if (totalFilesMaterialized > 0) {
          columns.add(String.format("%d FILES MATERIALIZED", totalFilesMaterialized));
        }

        if (distBuildStatus.get().getMessage().isPresent()) {
          columns.add("[" + distBuildStatus.get().getMessage().get() + "]");
        }
      }
    }
    // parseLine is never empty here; the isNullOrEmpty guard is kept for safety.
    parseLine = "(" + Joiner.on(", ").join(columns) + ")";
    return Strings.isNullOrEmpty(parseLine) ? Optional.empty() : Optional.of(parseLine);
  }

  /** Adds log messages for rendering. */
  @VisibleForTesting
  ImmutableList<String> createLogRenderLines() {
    ImmutableList.Builder<String> logEventLinesBuilder = ImmutableList.builder();
    ConsoleEvent logEvent;
    while ((logEvent = logEvents.poll()) != null) {
      formatConsoleEvent(logEvent, logEventLinesBuilder);
      if (logEvent.getLevel().equals(Level.WARNING)) {
        anyWarningsPrinted.set(true);
      } else if (logEvent.getLevel().equals(Level.SEVERE)) {
        anyErrorsPrinted.set(true);
      }
    }
    return logEventLinesBuilder.build();
  }

  /**
   * Renders up to {@code maxLines} per-executor status lines; when there are
   * more executors than lines, the overflow is compressed onto one short line.
   */
  public void renderLines(
      MultiStateRenderer renderer,
      ImmutableList.Builder<String> lines,
      int maxLines,
      boolean alwaysSortByTime) {
    int threadCount = renderer.getExecutorCount();
    int fullLines = threadCount;
    boolean useCompressedLine = false;
    if (threadCount > maxLines) {
      // One line will be used for the remaining threads that don't get their own line.
      fullLines = maxLines - 1;
      useCompressedLine = true;
    }
    int threadsWithShortStatus = threadCount - fullLines;
    boolean sortByTime = alwaysSortByTime || useCompressedLine;
    ImmutableList<Long> threadIds = renderer.getSortedExecutorIds(sortByTime);
    StringBuilder lineBuilder = new StringBuilder(EXPECTED_MAXIMUM_RENDERED_LINE_LENGTH);
    for (int i = 0; i < fullLines; ++i) {
      long threadId = threadIds.get(i);
      lineBuilder.delete(0, lineBuilder.length());
      lines.add(renderer.renderStatusLine(threadId, lineBuilder));
    }
    if (useCompressedLine) {
      lineBuilder.delete(0, lineBuilder.length());
      lineBuilder.append(" |=> ");
      lineBuilder.append(threadsWithShortStatus);
      if (fullLines == 0) {
        lineBuilder.append(String.format(" %s:", renderer.getExecutorCollectionLabel()));
      } else {
        lineBuilder.append(String.format(" MORE %s:", renderer.getExecutorCollectionLabel()));
      }
      for (int i = fullLines; i < threadIds.size(); ++i) {
        long threadId = threadIds.get(i);
        lineBuilder.append(" ");
        lineBuilder.append(renderer.renderShortStatus(threadId));
      }
      lines.add(lineBuilder.toString());
    }
  }

  /** Builds the "(N PASS/…)" suffix for the Testing line, if any tests ran. */
  private Optional<String> renderTestSuffix() {
    int testPassesVal = numPassingTests.get();
    int testFailuresVal = numFailingTests.get();
    int testSkipsVal =
        numDisabledTests.get()
            + numAssumptionViolationTests.get()
            +
            // don't count: numExcludedTests.get() +
            numDryRunTests.get();
    if (testSkipsVal > 0) {
      return Optional.of(
          String.format(
              locale, "(%d PASS/%d SKIP/%d FAIL)", testPassesVal, testSkipsVal, testFailuresVal));
    } else if (testPassesVal > 0 || testFailuresVal > 0) {
      return Optional.of(
          String.format(locale, "(%d PASS/%d FAIL)", testPassesVal, testFailuresVal));
    } else {
      return Optional.empty();
    }
  }

  /**
   * @return A string of ansi characters that will clear the last set of lines printed by {@link
   *     SuperConsoleEventBusListener#createRenderLinesAtTime(long)}.
   */
  private String clearLastRender() {
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < lastNumLinesPrinted; ++i) {
      result.append(ansi.cursorPreviousLine(1));
      result.append(ansi.clearLine());
    }
    return result.toString();
  }

  @Override
  @Subscribe
  public void autoSparseStateSparseRefreshFinished(
      AutoSparseStateEvents.SparseRefreshFinished finished) {
    super.autoSparseStateSparseRefreshFinished(finished);
    // Fold successive refresh summaries into one combined summary.
    autoSparseSummary =
        Optional.of(
            autoSparseSummary
                .map(s -> s.combineSummaries(finished.summary))
                .orElse(finished.summary));
  }

  @Override
  @Subscribe
  public void buildRuleStarted(BuildRuleEvent.Started started) {
    super.buildRuleStarted(started);
  }

  @Override
  @Subscribe
  public void buildRuleSuspended(BuildRuleEvent.Suspended suspended) {
    super.buildRuleSuspended(suspended);
  }

  @Override
  @Subscribe
  public void buildRuleResumed(BuildRuleEvent.Resumed resumed) {
    super.buildRuleResumed(resumed);
  }

  @Subscribe
  public void stepStarted(StepEvent.Started started) {
    threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
  }

  @Subscribe
  public void stepFinished(StepEvent.Finished finished) {
    threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
  }

  // TODO(cjhopman): We should introduce a simple LeafEvent-like thing that everything that logs
  // step-like things can subscribe to.
/** Tracks a simple leaf event as the current activity on its thread. */
@Subscribe
public void simpleLeafEventStarted(LeafEvents.SimpleLeafEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void simpleLeafEventFinished(LeafEvents.SimpleLeafEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}

/** Rule-key calculation is surfaced like a step in the per-thread status line. */
@Subscribe
public void ruleKeyCalculationStarted(RuleKeyCalculationEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void ruleKeyCalculationFinished(RuleKeyCalculationEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}

/** Records the latest status per distributed-build slave, keyed by run id. */
@Override
@Subscribe
public void onDistBuildStatusEvent(DistBuildStatusEvent event) {
  super.onDistBuildStatusEvent(event);
  synchronized (distBuildSlaveTrackerLock) {
    for (BuildSlaveStatus status : event.getStatus().getSlaveStatuses()) {
      distBuildSlaveTracker.put(status.runId, status);
    }
  }
}

// Only synchronous cache operations block the thread, so only those are shown
// as the thread's current activity.
@Subscribe
public void artifactCacheStarted(ArtifactCacheEvent.Started started) {
  if (started.getInvocationType() == ArtifactCacheEvent.InvocationType.SYNCHRONOUS) {
    threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
  }
}

@Subscribe
public void artifactCacheFinished(ArtifactCacheEvent.Finished finished) {
  if (finished.getInvocationType() == ArtifactCacheEvent.InvocationType.SYNCHRONOUS) {
    threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
  }
}

@Subscribe
public void artifactCompressionStarted(ArtifactCompressionEvent.Started started) {
  threadsToRunningStep.put(started.getThreadId(), Optional.of(started));
}

@Subscribe
public void artifactCompressionFinished(ArtifactCompressionEvent.Finished finished) {
  threadsToRunningStep.put(finished.getThreadId(), Optional.empty());
}

/**
 * Marks the start of a test run (at most one per listener lifetime) and queues the
 * formatter's run-started banner into the pending test report.
 */
@Subscribe
public void testRunStarted(TestRunEvent.Started event) {
  boolean set = testRunStarted.compareAndSet(null, event);
  Preconditions.checkState(set, "Test run should not start while test run in progress");
  ImmutableList.Builder<String> builder = ImmutableList.builder();
  testFormatter.runStarted(
      builder,
      event.isRunAllTests(),
      event.getTestSelectorList(),
      event.shouldExplainTestSelectorList(),
      event.getTargetNames(),
      TestResultFormatter.FormatMode.AFTER_TEST_RUN);
  synchronized (testReportBuilder) {
    testReportBuilder.addAll(builder.build());
  }
}

/**
 * Marks the end of the test run, formats all per-target results plus any status
 * messages, and prints the full report to stdout after a final frame render.
 */
@Subscribe
public void testRunFinished(TestRunEvent.Finished finished) {
  boolean set = testRunFinished.compareAndSet(null, finished);
  Preconditions.checkState(set, "Test run should not finish after test run already finished");
  ImmutableList.Builder<String> builder = ImmutableList.builder();
  for (TestResults results : finished.getResults()) {
    testFormatter.reportResult(builder, results);
  }
  ImmutableList<TestStatusMessage> testStatusMessages;
  synchronized (testStatusMessageBuilder) {
    testStatusMessages = testStatusMessageBuilder.build();
  }
  testFormatter.runComplete(builder, finished.getResults(), testStatusMessages);
  String testOutput;
  synchronized (testReportBuilder) {
    testReportBuilder.addAll(builder.build());
    testOutput = Joiner.on('\n').join(testReportBuilder.build());
  }
  // We're about to write to stdout, so make sure we render the final frame before we do.
  render();
  synchronized (console.getStdOut()) {
    console.getStdOut().println(testOutput);
  }
}

/** Shows the status message on its thread's line and archives it for the final report. */
@Subscribe
public void testStatusMessageStarted(TestStatusMessageEvent.Started started) {
  threadsToRunningTestStatusMessageEvent.put(started.getThreadId(), Optional.of(started));
  synchronized (testStatusMessageBuilder) {
    testStatusMessageBuilder.add(started.getTestStatusMessage());
  }
}

@Subscribe
public void testStatusMessageFinished(TestStatusMessageEvent.Finished finished) {
  threadsToRunningTestStatusMessageEvent.put(finished.getThreadId(), Optional.empty());
  synchronized (testStatusMessageBuilder) {
    testStatusMessageBuilder.add(finished.getTestStatusMessage());
  }
}

@Subscribe
public void testSummaryStarted(TestSummaryEvent.Started started) {
  threadsToRunningTestSummaryEvent.put(started.getThreadId(), Optional.of(started));
}

/**
 * Bumps the counter matching the finished test's result type; failures are also
 * logged immediately as a severe console event (one line, no stack trace).
 */
@Subscribe
public void testSummaryFinished(TestSummaryEvent.Finished finished) {
  threadsToRunningTestSummaryEvent.put(finished.getThreadId(), Optional.empty());
  TestResultSummary testResult = finished.getTestResultSummary();
  ResultType resultType = testResult.getType();
  switch (resultType) {
    case SUCCESS:
      numPassingTests.incrementAndGet();
      break;
    case FAILURE:
      numFailingTests.incrementAndGet();
      // We don't use TestResultFormatter.reportResultSummary() here since that also
      // includes the stack trace and stdout/stderr.
      logEvents.add(
          ConsoleEvent.severe(
              String.format(
                  locale,
                  "%s %s %s: %s",
                  testResult.getType().toString(),
                  testResult.getTestCaseName(),
                  testResult.getTestName(),
                  testResult.getMessage())));
      break;
    case ASSUMPTION_VIOLATION:
      numAssumptionViolationTests.incrementAndGet();
      break;
    case DISABLED:
      numDisabledTests.incrementAndGet();
      break;
    case DRY_RUN:
      numDryRunTests.incrementAndGet();
      break;
    case EXCLUDED:
      numExcludedTests.incrementAndGet();
      break;
  }
}

@Subscribe
public void logEvent(ConsoleEvent event) {
  logEvents.add(event);
}

@Override
public void printSevereWarningDirectly(String line) {
  logEvents.add(ConsoleEvent.severe(line));
}

/** Logs an info line at most once per distinct message (deduped via actionGraphCacheMessage). */
private void printInfoDirectlyOnce(String line) {
  if (!actionGraphCacheMessage.contains(line)) {
    logEvents.add(ConsoleEvent.info(line));
    actionGraphCacheMessage.add(line);
  }
}

@Subscribe
@SuppressWarnings("unused")
public void actionGraphCacheHit(ActionGraphEvent.Cache.Hit event) {
  // We don't need to report when it's fast.
  if (isZeroFileChanges) {
    LOG.debug("Action graph cache hit: Watchman reported no changes");
  } else {
    LOG.debug("Action graph cache hit");
  }
  parsingStatus = Optional.of("actionGraphCacheHit");
}

@Subscribe
public void watchmanOverflow(WatchmanStatusEvent.Overflow event) {
  printInfoDirectlyOnce(
      "Action graph will be rebuilt because there was an issue with watchman:\n"
          + event.getReason());
  parsingStatus = Optional.of("watchmanOverflow: " + event.getReason());
}

private void printFileAddedOrRemoved() {
  printInfoDirectlyOnce("Action graph will be rebuilt because files have been added or removed.");
}

@Subscribe
@SuppressWarnings("unused")
public void watchmanFileCreation(WatchmanStatusEvent.FileCreation event) {
  LOG.debug("Watchman notified about file addition: " + event.getFilename());
  printFileAddedOrRemoved();
  parsingStatus = Optional.of("watchmanFileCreation");
}

@Subscribe
@SuppressWarnings("unused")
public void watchmanFileDeletion(WatchmanStatusEvent.FileDeletion event) {
  LOG.debug("Watchman notified about file deletion: " + event.getFilename());
  printFileAddedOrRemoved();
  parsingStatus = Optional.of("watchmanFileDeletion");
}

@Subscribe
@SuppressWarnings("unused")
public void watchmanZeroFileChanges(WatchmanStatusEvent.ZeroFileChanges event) {
  isZeroFileChanges = true;
  parsingStatus = Optional.of("watchmanZeroFileChanges");
}

@Subscribe
@SuppressWarnings("unused")
public void daemonNewInstance(DaemonEvent.NewDaemonInstance event) {
  printInfoDirectlyOnce("Buck is creating the action graph.");
  parsingStatus = Optional.of("daemonNewInstance");
}

@Subscribe
@SuppressWarnings("unused")
public void symlinkInvalidation(ParsingEvent.SymlinkInvalidation event) {
  printInfoDirectlyOnce("Action graph will be rebuilt because symlinks are used.");
  parsingStatus = Optional.of("symlinkInvalidation");
}

@Subscribe
@SuppressWarnings("unused")
public void envVariableChange(ParsingEvent.EnvVariableChange event) {
  printInfoDirectlyOnce("Action graph will be rebuilt because environment variables changed.");
  parsingStatus = Optional.of("envVariableChange");
}

/**
 * Prefixes the reason with an emoji when the console charset is UTF-8; otherwise
 * falls back to plain "(FAST)"/"(SLOW) reason" text.
 */
@VisibleForTesting
static Optional<String> createParsingMessage(String emoji, String reason) {
  if (Charset.defaultCharset().equals(Charsets.UTF_8)) {
    return Optional.of(emoji + " " + reason);
  } else {
    if (emoji.equals(EMOJI_BUNNY)) {
      return Optional.of("(FAST)");
    } else {
      return Optional.of("(SLOW) " + reason);
    }
  }
}

/** Renders an autosparse refresh summary, or empty when no summary is present. */
static Optional<String> createAutoSparseStatusMessage(Optional<SparseSummary> summary) {
  if (!summary.isPresent()) {
    return Optional.empty();
  }
  SparseSummary sparse_summary = summary.get();
  // autosparse only ever exports include rules, we are only interested in added include rules and
  // the files added count.
  if (sparse_summary.getIncludeRulesAdded() == 0 && sparse_summary.getFilesAdded() == 0) {
    return createParsingMessage(EMOJI_DESERT, "Working copy size unchanged");
  }
  return createParsingMessage(
      EMOJI_ROLODEX,
      String.format(
          "%d new sparse rules imported, %d files added to the working copy",
          sparse_summary.getIncludeRulesAdded(), sparse_summary.getFilesAdded()));
}

/**
 * Formats elapsed ms as "MM:SS.d min" at or above one minute, else "S.d sec"
 * (tenths of a second). NOTE(review): durations >= 60 min render minutes > 59
 * rather than rolling into hours — confirm that is acceptable.
 */
@Override
protected String formatElapsedTime(long elapsedTimeMs) {
  long minutes = elapsedTimeMs / 60_000L;
  long seconds = elapsedTimeMs / 1000 - (minutes * 60);
  long milliseconds = elapsedTimeMs % 1000;
  if (elapsedTimeMs >= 60_000L) {
    return String.format("%02d:%02d.%d min", minutes, seconds, milliseconds / 100);
  } else {
    return String.format("%d.%d sec", seconds, milliseconds / 100);
  }
}

@VisibleForTesting
Optional<String> getParsingStatus() {
  return parsingStatus;
}

/** Stops the render loop and draws one last frame so the final state stays visible. */
@Override
public synchronized void close() throws IOException {
  super.close();
  stopRenderScheduler();
  render(); // Ensure final frame is rendered.
}
}
package br.udesc.loman.web.beans;

import br.udesc.loman.controle.CadastroTarefasUC;
import br.udesc.loman.modelo.Material;
import br.udesc.loman.modelo.Ocorrencia;
import br.udesc.loman.modelo.Status;
import br.udesc.loman.modelo.Tarefa;
import br.udesc.loman.web.AutenticacaoUtil;
import br.udesc.loman.web.LoManListener;
import br.udesc.web.CRUDSemPesquisa;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import javax.faces.application.FacesMessage;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
import javax.faces.context.FacesContext;
import javax.servlet.ServletContext;
import org.primefaces.event.FileUploadEvent;
import org.primefaces.event.SelectEvent;
import org.primefaces.model.DefaultStreamedContent;
import org.primefaces.model.StreamedContent;

/**
 * Session-scoped JSF backing bean for the task (Tarefa) workflows: assuming a task,
 * uploading material, reviewing and forwarding ("tramitar") tasks. All persistence
 * goes through the DAOs obtained from {@link LoManListener#getDAOFactory()}.
 *
 * <p>Review fixes applied: removed a dead empty try/catch in the constructor, removed
 * leftover System.out debug prints, and made the upload write use try-with-resources
 * so the file handle cannot leak and I/O errors are not silently swallowed.
 */
@ManagedBean
@SessionScoped
public class CadTarefa extends CRUDSemPesquisa<Tarefa> {

    /** Use case shared with the CRUD superclass (same instance as {@code cuc}). */
    private final CadastroTarefasUC ctuc;

    public CadTarefa() {
        super(new CadastroTarefasUC(LoManListener.getDAOFactory()), new String[]{});
        // Fix: the original wrapped nothing in an empty try/catch that logged; dead code removed.
        this.ctuc = (CadastroTarefasUC) cuc;
    }

    /** Opens the "assume task" page: loads the user's assumed and available tasks. */
    public String assumirTarefa() throws Exception {
        novo();
        setSelecTarefa(new Tarefa());
        this.tarefasAssumidas = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasAssumidas(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.tarefasDisponiveis = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasDisponiveis(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.tarefaDesabilitado = true;
        return "assumirtarefacad";
    }

    /** Opens the "provide material" page: loads only the user's assumed tasks. */
    public String disponibilizarMaterial() throws Exception {
        novo();
        setSelecTarefa(new Tarefa());
        this.tarefasAssumidas = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasAssumidas(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.tarefaDesabilitado = true;
        return "disponibilizarmaterialcad";
    }

    /** Opens the "review task" page: loads tasks ready for review by this user. */
    public String revisarTarefa() throws Exception {
        novo();
        this.tarefa = new Tarefa();
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.tarefasProntasParaRevisao = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasProntasParaRevisao(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.tarefaDesabilitado = true;
        return "revisartarefacad";
    }

    /** Opens the "forward task" page; the forwarding status defaults to CONCLUIDA. */
    public String tramitarTarefa() throws Exception {
        novo();
        this.tarefa = new Tarefa();
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.tarefasProntasParaRevisao = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasProntasParaRevisao(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.tarefaDesabilitado = true;
        this.copiaStatus = Status.CONCLUIDA;
        return "tramitartarefacad";
    }

    /* ******************************* */
    /* Tarefa                          */
    /* ******************************* */

    // Currently selected task on the various pages.
    private Tarefa tarefa = new Tarefa();

    public Tarefa getSelecTarefa() {
        return tarefa;
    }

    public void setSelecTarefa(Tarefa tarefa) {
        this.tarefa = tarefa;
    }

    private List<Tarefa> tarefasDisponiveis = new ArrayList<Tarefa>();

    public List<Tarefa> getTarefasDisponiveis() throws Exception {
        return tarefasDisponiveis;
    }

    public void setTarefasDisponiveis(List<Tarefa> tarefasDisponiveis) {
        this.tarefasDisponiveis = tarefasDisponiveis;
    }

    private List<Tarefa> tarefasAssumidas = new ArrayList<Tarefa>();

    public List<Tarefa> getTarefasAssumidas() throws Exception {
        return tarefasAssumidas;
    }

    public void setTarefasAssumidas(List<Tarefa> tarefasAssumidas) {
        this.tarefasAssumidas = tarefasAssumidas;
    }

    List<Tarefa> tarefasProntasParaRevisao = new ArrayList<Tarefa>();

    public List<Tarefa> getTarefasProntasParaRevisao() {
        return tarefasProntasParaRevisao;
    }

    public void setTarefasProntasParaRevisao(List<Tarefa> tarefasProntasParaRevisao) {
        this.tarefasProntasParaRevisao = tarefasProntasParaRevisao;
    }

    /** Formats the task's delivery date as dd/MM/yyyy for the UI tables. */
    public String formatarDataTarefa(Tarefa tar) {
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
        return sdf.format(tar.getDataEntrega());
    }

    // Status chosen on the "tramitar" page before it is written onto the task.
    Status copiaStatus;

    public Status getCopiaStatus() {
        return copiaStatus;
    }

    public void setCopiaStatus(Status copiaStatus) {
        this.copiaStatus = copiaStatus;
    }

    // Disables action buttons until a row is selected.
    private boolean tarefaDesabilitado = true;

    public boolean isTarefaDesabilitado() {
        return tarefaDesabilitado;
    }

    public void setTarefaDesabilitado(boolean tarefaDesabilitado) {
        this.tarefaDesabilitado = tarefaDesabilitado;
    }

    public void onRowSelectTarefaDisponivel(SelectEvent event) throws Exception {
        this.ocorrenciasTarefa = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefa(getSelecTarefa());
        this.tarefaDesabilitado = false;
    }

    public void onRowSelectTarefaDisponibilizarMaterial(SelectEvent event) throws Exception {
        this.ocorrenciasTarefa = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefa(getSelecTarefa());
        this.tarefaDesabilitado = false;
    }

    public void onRowSelectRevisarTarefa(SelectEvent event) throws Exception {
        this.ocorrenciasTarefa = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefa(getSelecTarefa());
        this.ocorrenciasTarefaComMaterial = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefaMateriais(getSelecTarefa());
        this.tarefaDesabilitado = false;
        // Fix: removed leftover System.out.println("ON ROW") debug spam.
    }

    public void onRowSelectTramitarTarefa(SelectEvent event) throws Exception {
        this.ocorrenciasTarefa = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefa(getSelecTarefa());
        this.copiaStatus = Status.CONCLUIDA;
    }

    public void onRowSelectOcorrenciaRevisarTarefa(SelectEvent event) throws Exception {
        this.ocorrencia = (Ocorrencia) event.getObject();
        this.material = this.ocorrencia.getMaterial();
    }

    /** Fills the current occurrence with user/task/description and attaches it to the task. */
    public void associarOcorrenciaTarefa(String descricao) {
        this.ocorrencia.setUsuario(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrencia.setTarefa(this.getSelecTarefa());
        this.ocorrencia.setDescricao(descricao);
        this.ocorrencia.setDataModificacao(new java.util.Date());
        this.getSelecTarefa().getOcorrencias().add(this.ocorrencia);
    }

    /** Assigns the selected task to the session user, persists it, and refreshes the lists. */
    public void assumirTarefaUsuario() throws Exception {
        this.associarOcorrenciaTarefa("Assumir Tarefa");
        this.getSelecTarefa().setStatus(Status.EM_ANDAMENTO);
        this.getSelecTarefa().setUsuario(AutenticacaoUtil.getInstance().getUsuarioSessao());
        LoManListener.getDAOFactory().getTarefaDAO().alterar(getSelecTarefa());
        this.tarefasAssumidas = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasAssumidas(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.tarefasDisponiveis = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasDisponiveis(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.tarefaDesabilitado = true;
    }

    /** Forwards the selected task with the chosen status and hands it back to its previous owner. */
    public void tramiteTarefa() throws Exception {
        this.getSelecTarefa().setStatus(copiaStatus);
        if (this.getSelecTarefa().getStatus() == Status.COM_PROBLEMAS) {
            this.associarOcorrenciaTarefa("Tarefa com Problemas");
        } else {
            this.associarOcorrenciaTarefa("Tarefa concluida");
        }
        this.getSelecTarefa().setUsuario(this.ocorrencia.getUsuario());
        LoManListener.getDAOFactory().getTarefaDAO().alterar(this.getSelecTarefa());
        this.tarefasProntasParaRevisao = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasProntasParaRevisao(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        this.ocorrencia = new Ocorrencia();
        this.tarefaDesabilitado = true;
        mensagem(FacesMessage.SEVERITY_INFO, "Tarefa", getSelecTarefa().getTitulo() + " tramitada com sucesso!");
    }

    public void setarRegistoTarefa(Tarefa tarefa) {
        this.registro = tarefa;
    }

    /* ******************************* */
    /* Ocorrencia                      */
    /* ******************************* */

    List<Ocorrencia> ocorrencias = new ArrayList<Ocorrencia>();

    public List<Ocorrencia> getOcorrencias() {
        return ocorrencias;
    }

    public void setOcorrencias(List<Ocorrencia> ocorrencias) {
        this.ocorrencias = ocorrencias;
    }

    private Ocorrencia ocorrencia = new Ocorrencia();

    public Ocorrencia getSelecOcorrencia() {
        return ocorrencia;
    }

    public void setSelecOcorrencia(Ocorrencia ocorrencia) {
        this.ocorrencia = ocorrencia;
    }

    List<Ocorrencia> ocorrenciasTarefa = new ArrayList<Ocorrencia>();

    public List<Ocorrencia> getOcorrenciasTarefa() {
        return ocorrenciasTarefa;
    }

    public void setOcorrenciasTarefa(List<Ocorrencia> ocorrenciasTarefa) {
        this.ocorrenciasTarefa = ocorrenciasTarefa;
    }

    List<Ocorrencia> ocorrenciasTarefaComMaterial = new ArrayList<Ocorrencia>();

    public List<Ocorrencia> getOcorrenciasTarefaComMaterial() {
        return ocorrenciasTarefaComMaterial;
    }

    public void setOcorrenciasTarefaComMaterial(List<Ocorrencia> ocorrenciaTarefaComMaterial) {
        this.ocorrenciasTarefaComMaterial = ocorrenciaTarefaComMaterial;
    }

    public String formatarDataOcorrencia(Ocorrencia oco) {
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
        return sdf.format(oco.getDataModificacao());
    }

    /** Returns only the file name (last path segment) of the occurrence's material. */
    public String formatarNomeArquivo(Ocorrencia oco) {
        String[] g = oco.getMaterial().getCaminho().split(Pattern.quote("/"));
        return g[g.length - 1];
    }

    /* ******************************* */
    /* Material                        */
    /* ******************************* */

    private Material material = new Material();

    public Material getMaterial() {
        return material;
    }

    public void setMaterial(Material material) {
        this.material = material;
    }

    /**
     * Receives an uploaded file, tags its name with the user's nick, records a
     * Material + Ocorrencia on the selected task, persists the task, and writes the
     * bytes under /WEB-INF/materiais/.
     */
    public void handleFileUpload(FileUploadEvent event) throws IOException, Exception {
        mensagem(FacesMessage.SEVERITY_INFO, "Sucesso!", event.getFile().getFileName() + " foi salvo!");
        byte[] buffer = event.getFile().getContents();
        // Tag the file name with "(nick)" before the extension.
        // NOTE(review): intermediate dots are dropped ("a.b.txt" -> "ab(nick).txt") —
        // preserved as-is since stored paths depend on it; confirm whether intended.
        String nomeArquivo = event.getFile().getFileName();
        String[] g = nomeArquivo.split(Pattern.quote("."));
        nomeArquivo = "";
        for (int i = 0; i < g.length; i++) {
            if (i == g.length - 1) {
                g[i] = "(" + AutenticacaoUtil.getInstance().getUsuarioSessao().getNick() + ")" + "." + g[i];
            }
            nomeArquivo = nomeArquivo + g[i];
        }
        FacesContext context = FacesContext.getCurrentInstance();
        String caminhoMaterial = context.getExternalContext().getRealPath("/WEB-INF/materiais/" + nomeArquivo);
        String caminhoBanco = "/WEB-INF/materiais/" + nomeArquivo;
        // Attach the material to the selected task.
        List<Material> materiais = new ArrayList<Material>();
        this.material.setObservacao(ocorrencia.getObservacao());
        this.material.setTarefa(getSelecTarefa());
        this.material.setCaminho(caminhoBanco);
        materiais.add(material);
        this.getSelecTarefa().setMateriais(materiais);
        // Record the upload as an occurrence and mark the task ready for review.
        this.associarOcorrenciaTarefa("Upload de material");
        this.ocorrencia.setMaterial(material);
        this.getSelecTarefa().setStatus(Status.PRONTA_PARA_REVISAO);
        LoManListener.getDAOFactory().getTarefaDAO().alterar(getSelecTarefa());
        // Fix: write the bytes with try-with-resources. The original used a PrintStream,
        // which leaked the handle on failure and swallows IOExceptions internally.
        try (OutputStream saida = new FileOutputStream(caminhoMaterial)) {
            saida.write(buffer);
        }
        // Refresh lists and reset form state.
        this.limparObservacao();
        this.tarefasDisponiveis = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasDisponiveis(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.tarefasAssumidas = LoManListener.getDAOFactory().getTarefaDAO().buscaTarefasAssumidas(AutenticacaoUtil.getInstance().getUsuarioSessao());
        this.ocorrenciasTarefa = new ArrayList<Ocorrencia>();
        tarefaDesabilitado = true;
    }

    public void tarefaConcluida() {
        this.getSelecTarefa().setStatus(Status.CONCLUIDA);
    }

    public void limparObservacao() {
        this.ocorrencia.setObservacao(null);
    }

    // Streamed download handle produced by dowloadMaterial().
    private StreamedContent file;

    public StreamedContent getFile() {
        return file;
    }

    public void setarOcorrenciasTarefaComMaterial() throws Exception {
        // Fix: removed leftover System.out.println("ACTIONLISTNER") debug spam.
        this.ocorrenciasTarefaComMaterial = LoManListener.getDAOFactory().getOcorrenciaDAO().buscaOcorrenciasTarefaMateriais(getSelecTarefa());
    }

    /**
     * Prepares the selected occurrence's material for download as a stream.
     * NOTE(review): method name ("dowload") and the fixed "image/jpg" content type are
     * part of the public page contract and are kept unchanged.
     */
    public void dowloadMaterial(Ocorrencia oco) {
        this.setSelecOcorrencia(oco);
        InputStream stream = ((ServletContext) FacesContext.getCurrentInstance().getExternalContext().getContext()).getResourceAsStream(this.getSelecOcorrencia().getMaterial().getCaminho());
        String nomeDownload = this.getSelecOcorrencia().getMaterial().getCaminho();
        String[] novoNome = nomeDownload.split("/");
        nomeDownload = novoNome[novoNome.length - 1];
        file = new DefaultStreamedContent(stream, "image/jpg", nomeDownload);
    }

    /** Adds a global FacesMessage with the given severity, title and detail. */
    public void mensagem(FacesMessage.Severity severity, String titulo, String mensagem) {
        FacesMessage msg = new FacesMessage(severity, titulo, mensagem);
        FacesContext.getCurrentInstance().addMessage(null, msg);
    }
}
/*
 * Copyright (c) 2010-2013 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.web.page.admin.reports.dto;

import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ExportType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ReportType;
import com.evolveum.prism.xml.ns._public.types_3.PolyStringType;

import java.io.Serializable;

/**
 * Form-backing DTO for editing a {@link ReportType}: holds the editable scalar
 * properties plus the Jasper template wrapper, and can materialize them back into
 * a prism object via {@link #getObject()}.
 *
 * <p>Review fix: removed long-dead commented-out code for the former {@code xml}
 * field (the F_XML constant is kept since it is part of the public API surface).
 *
 * @author lazyman
 */
public class ReportDto implements Serializable {

    public static final String F_PARENT = "parent";
    public static final String F_OID = "oid";
    public static final String F_XML = "xml";
    public static final String F_NAME = "name";
    public static final String F_DESCRIPTION = "description";
    public static final String F_EXPORT_TYPE = "exportType";
    public static final String F_VIRTUALIZER = "virtualizer";
    public static final String F_VIRTUALIZER_KICKON = "virtualizerKickOn";
    public static final String F_MAXPAGES = "maxPages";
    public static final String F_TIMEOUT = "timeout";

    private boolean parent;
    private String oid;
    private String name;
    private String description;
    // NOTE(review): set in one constructor but never read or exposed — candidate for removal.
    private boolean searchOnResource;
    private ExportType exportType;
    private JasperReportDto jasperReportDto;
    private byte[] templateStyle;
    private String virtualizer;
    private Integer virtualizerKickOn;
    private Integer maxPages;
    private Integer timeout;
    // Backing prism object; lazily created in getObject() when absent.
    private ReportType reportType;

    public ReportDto() {
    }

    public ReportDto(byte[] reportJrxml) {
        this.jasperReportDto = new JasperReportDto(reportJrxml);
    }

    /**
     * Populates the DTO from an existing report.
     *
     * @param onlyForPromptingParams passed through to {@link JasperReportDto}; when true,
     *        presumably only the prompting parameters are parsed — confirm in JasperReportDto.
     */
    public ReportDto(ReportType reportType, boolean onlyForPromptingParams) {
        this.oid = reportType.getOid();
        this.name = reportType.getName().getOrig();
        this.exportType = reportType.getExport();
        this.searchOnResource = false;
        this.description = reportType.getDescription();
        this.jasperReportDto = new JasperReportDto(reportType.getTemplate(), onlyForPromptingParams);
        this.templateStyle = reportType.getTemplateStyle();
        this.parent = reportType.isParent();
        this.virtualizer = reportType.getVirtualizer();
        this.virtualizerKickOn = reportType.getVirtualizerKickOn();
        this.maxPages = reportType.getMaxPages();
        this.timeout = reportType.getTimeout();
        this.reportType = reportType;
    }

    public ReportDto(ReportType reportType) {
        this(reportType, false);
    }

    public ReportDto(String name, String description) {
        this.description = description;
        this.name = name;
    }

    public ReportDto(String name, String description, ExportType export, boolean parent) {
        this.name = name;
        this.description = description;
        this.exportType = export;
        this.parent = parent;
    }

    public boolean isParent() {
        return parent;
    }

    public void setParent(boolean parent) {
        this.parent = parent;
    }

    /**
     * Writes the DTO's current values into the backing {@link ReportType} (creating one
     * if absent) and returns it as a prism object. Note this getter mutates state.
     */
    public PrismObject<ReportType> getObject() {
        if (reportType == null) {
            reportType = new ReportType();
            //TODO FIXME temporary every new report will be set as parent report
            reportType.setParent(Boolean.TRUE);
        }
        reportType.setName(new PolyStringType(name));
        reportType.setExport(exportType);
        reportType.setTemplate(jasperReportDto.getTemplate());
        reportType.setTemplateStyle(templateStyle);
        reportType.setDescription(description);
        reportType.setVirtualizer(virtualizer);
        reportType.setVirtualizerKickOn(virtualizerKickOn);
        reportType.setMaxPages(maxPages);
        reportType.setTimeout(timeout);
        return reportType.asPrismObject();
    }

    public void setObject(PrismObject<ReportType> object) {
        this.reportType = object.asObjectable();
    }

    public String getOid() {
        return oid;
    }

    public void setOid(String oid) {
        this.oid = oid;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public ExportType getExportType() {
        return exportType;
    }

    public void setExportType(ExportType exportType) {
        this.exportType = exportType;
    }

    public JasperReportDto getJasperReportDto() {
        return jasperReportDto;
    }

    public String getVirtualizer() {
        return virtualizer;
    }

    public void setVirtualizer(String virtualizer) {
        this.virtualizer = virtualizer;
    }

    public Integer getVirtualizerKickOn() {
        return virtualizerKickOn;
    }

    public void setVirtualizerKickOn(Integer virtualizerKickOn) {
        this.virtualizerKickOn = virtualizerKickOn;
    }

    public Integer getMaxPages() {
        return maxPages;
    }

    public void setMaxPages(Integer maxPages) {
        this.maxPages = maxPages;
    }

    public Integer getTimeout() {
        return timeout;
    }

    public void setTimeout(Integer timeout) {
        this.timeout = timeout;
    }
}
package com.annotations.openface.omnibean.json;

import java.io.IOException;
import java.io.Writer;

/*
Copyright (c) 2006 JSON.org

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

The Software shall be used for Good, not Evil.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

/**
 * JSONWriter provides a quick and convenient way of producing JSON text.
 * The texts produced strictly conform to JSON syntax rules. No whitespace is
 * added, so the results are ready for transmission or storage. Each instance of
 * JSONWriter can produce one JSON text.
 * <p>
 * A JSONWriter instance provides a <code>value</code> method for appending
 * values to the text, and a <code>key</code> method for adding keys before
 * values in objects. There are <code>array</code> and <code>endArray</code>
 * methods that make and bound array values, and <code>object</code> and
 * <code>endObject</code> methods which make and bound object values. All of
 * these methods return the JSONWriter instance, permitting a cascade style.
 * For example, <pre>
 * new JSONWriter(myWriter)
 *     .object()
 *         .key("JSON")
 *         .value("Hello, World!")
 *     .endObject();</pre> which writes <pre>
 * {"JSON":"Hello, World!"}</pre>
 * <p>
 * The first method called must be <code>array</code> or <code>object</code>.
 * There are no methods for adding commas or colons. JSONWriter adds them for
 * you. Objects and arrays can be nested up to 200 levels deep.
 * <p>
 * This can sometimes be easier than using a JSONObject to build a string.
 * @author JSON.org
 * @version 2011-11-24
 */
public class JSONWriter {
    /** Maximum nesting depth of objects/arrays (size of {@link #stack}). */
    private static final int maxdepth = 200;

    /**
     * The comma flag determines if a comma should be output before the next
     * value.
     */
    private boolean comma;

    /**
     * The current mode. Values:
     * 'a' (array),
     * 'd' (done),
     * 'i' (initial),
     * 'k' (key),
     * 'o' (object).
     */
    protected char mode;

    /**
     * The object/array stack. A null slot marks an array scope; a JSONObject
     * slot marks an object scope and also tracks keys to reject duplicates.
     */
    private final JSONObject stack[];

    /**
     * The stack top index. A value of 0 indicates that the stack is empty.
     */
    private int top;

    /**
     * The writer that will receive the output.
     */
    protected Writer writer;

    /**
     * Make a fresh JSONWriter. It can be used to build one JSON text.
     * @param w the writer that receives the JSON text
     */
    public JSONWriter(Writer w) {
        this.comma = false;
        this.mode = 'i';
        this.stack = new JSONObject[maxdepth];
        this.top = 0;
        this.writer = w;
    }

    /**
     * Append a value.
     * @param string A string value.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    private JSONWriter append(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null pointer");
        }
        if (this.mode == 'o' || this.mode == 'a') {
            try {
                // Inside an array, values after the first are comma-separated;
                // inside an object the comma was handled by key().
                if (this.comma && this.mode == 'a') {
                    this.writer.write(',');
                }
                this.writer.write(string);
            } catch (IOException e) {
                throw new JSONException(e);
            }
            if (this.mode == 'o') {
                // A value completes a key/value pair; next token must be a key.
                this.mode = 'k';
            }
            this.comma = true;
            return this;
        }
        throw new JSONException("Value out of sequence.");
    }

    /**
     * Begin appending a new array. All values until the balancing
     * <code>endArray</code> will be appended to this array. The
     * <code>endArray</code> method must be called to mark the array's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter array() throws JSONException {
        if (this.mode == 'i' || this.mode == 'o' || this.mode == 'a') {
            this.push(null);
            this.append("[");
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced array.");
    }

    /**
     * End something.
     * @param mode Mode
     * @param c Closing character
     * @return this
     * @throws JSONException If unbalanced.
     */
    private JSONWriter end(char mode, char c) throws JSONException {
        if (this.mode != mode) {
            throw new JSONException(mode == 'a'
                ? "Misplaced endArray."
                : "Misplaced endObject.");
        }
        this.pop(mode);
        try {
            this.writer.write(c);
        } catch (IOException e) {
            throw new JSONException(e);
        }
        this.comma = true;
        return this;
    }

    /**
     * End an array. This method must be called to balance calls to
     * <code>array</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endArray() throws JSONException {
        return this.end('a', ']');
    }

    /**
     * End an object. This method must be called to balance calls to
     * <code>object</code>.
     * @return this
     * @throws JSONException If incorrectly nested.
     */
    public JSONWriter endObject() throws JSONException {
        // Inside an object scope the steady-state mode is 'k' (awaiting a key).
        return this.end('k', '}');
    }

    /**
     * Append a key. The key will be associated with the next value. In an
     * object, every value must be preceded by a key.
     * @param string A key string.
     * @return this
     * @throws JSONException If the key is out of place. For example, keys
     * do not belong in arrays or if the key is null.
     */
    public JSONWriter key(String string) throws JSONException {
        if (string == null) {
            throw new JSONException("Null key.");
        }
        if (this.mode == 'k') {
            try {
                // putOnce rejects duplicate keys within the current object.
                this.stack[this.top - 1].putOnce(string, Boolean.TRUE);
                if (this.comma) {
                    this.writer.write(',');
                }
                this.writer.write(JSONObject.quote(string));
                this.writer.write(':');
                this.comma = false;
                this.mode = 'o';
                return this;
            } catch (IOException e) {
                throw new JSONException(e);
            }
        }
        throw new JSONException("Misplaced key.");
    }

    /**
     * Begin appending a new object. All keys and values until the balancing
     * <code>endObject</code> will be appended to this object. The
     * <code>endObject</code> method must be called to mark the object's end.
     * @return this
     * @throws JSONException If the nesting is too deep, or if the object is
     * started in the wrong place (for example as a key or after the end of the
     * outermost array or object).
     */
    public JSONWriter object() throws JSONException {
        if (this.mode == 'i') {
            this.mode = 'o';
        }
        if (this.mode == 'o' || this.mode == 'a') {
            this.append("{");
            this.push(new JSONObject());
            this.comma = false;
            return this;
        }
        throw new JSONException("Misplaced object.");
    }

    /**
     * Pop an array or object scope.
     * @param c The scope to close.
     * @throws JSONException If nesting is wrong.
     */
    private void pop(char c) throws JSONException {
        if (this.top <= 0) {
            throw new JSONException("Nesting error.");
        }
        char m = this.stack[this.top - 1] == null ? 'a' : 'k';
        if (m != c) {
            throw new JSONException("Nesting error.");
        }
        this.top -= 1;
        this.mode = this.top == 0
            ? 'd'
            : this.stack[this.top - 1] == null ? 'a' : 'k';
    }

    /**
     * Push an array or object scope.
     * @param jo The object to push, or null for an array scope.
     * @throws JSONException If nesting is too deep.
     */
    private void push(JSONObject jo) throws JSONException {
        if (this.top >= maxdepth) {
            throw new JSONException("Nesting too deep.");
        }
        this.stack[this.top] = jo;
        this.mode = jo == null ? 'a' : 'k';
        this.top += 1;
    }

    /**
     * Append either the value <code>true</code> or the value
     * <code>false</code>.
     * @param b A boolean.
     * @return this
     * @throws JSONException
     */
    public JSONWriter value(boolean b) throws JSONException {
        return this.append(b ? "true" : "false");
    }

    /**
     * Append a double value.
     * @param d A double.
     * @return this
     * @throws JSONException If the number is not finite.
     */
    public JSONWriter value(double d) throws JSONException {
        // Double.valueOf replaces the deprecated new Double(d) boxing constructor.
        return this.value(Double.valueOf(d));
    }

    /**
     * Append a long value.
     * @param l A long.
     * @return this
     * @throws JSONException
     */
    public JSONWriter value(long l) throws JSONException {
        return this.append(Long.toString(l));
    }

    /**
     * Append an object value.
     * @param object The object to append. It can be null, or a Boolean, Number,
     * String, JSONObject, or JSONArray, or an object that implements JSONString.
     * @return this
     * @throws JSONException If the value is out of sequence.
     */
    public JSONWriter value(Object object) throws JSONException {
        return this.append(JSONObject.valueToString(object));
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rave.portal.repository.impl;

import org.apache.rave.exception.NotSupportedException;
import org.apache.rave.portal.model.JpaPerson;
import org.apache.rave.model.Person;
import org.apache.rave.portal.model.impl.PersonImpl;
import org.apache.rave.portal.repository.PersonRepository;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.util.List;

import static junit.framework.Assert.assertNotNull;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Integration tests for {@code JpaPersonRepository}, run against the seeded
 * test data set defined by the test application/data contexts (13 people,
 * "canonical" having three friends). Mutating tests are transactional and
 * rolled back so the fixture stays intact between tests.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {"classpath:test-applicationContext.xml", "classpath:test-dataContext.xml"})
public class JpaPersonRepositoryTest {

    private static final String VALID_ID = "1";
    private static final String VALID_ID2 = "2";
    private static final String VALID_USER = "canonical";
    private static final String VALID_USER2 = "john.doe";
    private static final String VALID_USER3 = "jane.doe";
    private static final String VALID_USER4 = "george.doe";
    private static final String VALID_USER5 = "mario.rossi";
    private static final String INVALID_USERNAME = "INVALID_USERNAME";
    private static final String NYTIMES_GADGET_APPID = "http://widgets.nytimes.com/packages/html/igoogle/topstories.xml";

    @PersistenceContext
    private EntityManager manager;

    @Autowired
    private PersonRepository repository;

    @Test
    public void getType() {
        assertEquals(repository.getType(), JpaPerson.class);
    }

    @Test
    public void get() {
        JpaPerson p = (JpaPerson) repository.get(VALID_ID);
        assertThat(p.getEntityId().toString(), is(VALID_ID));
        assertThat(p.getUsername(), is(VALID_USER));
    }

    @Test
    public void getAll() {
        List<Person> people = repository.getAll();
        assertNotNull(people);
        assertThat(people.size(), is(13));
    }

    @Test
    public void getLimitedList() {
        final int offset = 5;
        final int pageSize = 5;
        List<Person> people = repository.getLimitedList(offset, pageSize);
        Assert.assertNotNull(people);
        assertThat(people.size(), is(5));
    }

    @Test
    public void countAll() {
        int count = repository.getCountAll();
        assertThat(count, is(13));
    }

    @Test
    public void findByUsername_valid() {
        Person person = repository.findByUsername(VALID_USER);
        assertThat(person, is(not(nullValue())));
        assertThat(person.getUsername(), is(equalTo(VALID_USER)));
    }

    @Test
    public void findByUsername_null() {
        Person person = repository.findByUsername(INVALID_USERNAME);
        assertThat(person, is(nullValue()));
    }

    @Test
    public void findFriends_valid() {
        List<Person> connected = repository.findFriends(VALID_USER);
        assertThat(connected.size(), is(equalTo(3)));
        assertThat(connected.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(connected.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(connected.get(2).getUsername(), is(equalTo(VALID_USER4)));
    }

    @Test
    public void findFriends_invalid() {
        List<Person> connected = repository.findFriends(INVALID_USERNAME);
        assertThat(connected.isEmpty(), is(true));
    }

    @Test
    public void findConnected_valid() {
        List<Person> connected = repository.findAllConnectedPeople(VALID_USER);
        assertThat(connected.size(), is(equalTo(4)));
        assertThat(connected.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(connected.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(connected.get(2).getUsername(), is(equalTo(VALID_USER4)));
        assertThat(connected.get(3).getUsername(), is(equalTo(VALID_USER5)));
    }

    @Test
    public void findConnected_invalid() {
        // Fixed copy-paste defect: this test previously exercised findFriends()
        // instead of the method under test, findAllConnectedPeople().
        List<Person> connected = repository.findAllConnectedPeople(INVALID_USERNAME);
        assertThat(connected.isEmpty(), is(true));
    }

    @Test
    public void findByGroup_valid() {
        List<Person> connected = repository.findByGroup("Party");
        assertThat(connected.size(), is(equalTo(2)));
        assertThat(connected.get(0).getUsername(), is(equalTo(VALID_USER)));
        assertThat(connected.get(1).getUsername(), is(equalTo(VALID_USER5)));
    }

    @Test
    public void findByGroup_invalid() {
        List<Person> connected = repository.findByGroup(INVALID_USERNAME);
        assertThat(connected.isEmpty(), is(true));
    }

    @Test(expected = NotSupportedException.class)
    public void findAllConnectedPeople_2param() {
        repository.findAllConnectedPeople("asdf", "asdf");
    }

    @Test(expected = NotSupportedException.class)
    public void findAllConnectedPeopleWithFriend() {
        repository.findAllConnectedPeopleWithFriend("asdf", "asdf");
    }

    @Test
    public void findFriendsUsingApp_valid() {
        List<Person> friendsUsingApp = repository.findFriends(VALID_USER, NYTIMES_GADGET_APPID);
        assertThat(friendsUsingApp.size(), is(equalTo(1)));
        assertThat(friendsUsingApp.get(0).getUsername(), is(equalTo(VALID_USER2)));
    }

    @Test
    public void findFriendsUsingApp_invalid() {
        List<Person> friendsUsingApp = repository.findFriends(INVALID_USERNAME, NYTIMES_GADGET_APPID);
        assertThat(friendsUsingApp.isEmpty(), is(true));
    }

    @Test
    public void findFriendsWithFriend_valid() {
        List<Person> friendsWithFriend = repository.findFriendsWithFriend(VALID_USER, VALID_USER2);
        assertThat(friendsWithFriend.size(), is(equalTo(2)));
        assertThat(friendsWithFriend.get(0).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friendsWithFriend.get(1).getUsername(), is(equalTo(VALID_USER4)));
    }

    @Test
    public void findFriendsWithFriend_invalid() {
        // Fixed copy-paste defect: this test previously exercised the one-arg
        // findFriends() instead of the method under test, findFriendsWithFriend().
        List<Person> friendsWithFriend = repository.findFriendsWithFriend(INVALID_USERNAME, VALID_USER2);
        assertThat(friendsWithFriend.isEmpty(), is(true));
    }

    @Test(expected = NotSupportedException.class)
    public void findByGroup() {
        repository.findByGroup("asdf", "asdf");
    }

    @Test(expected = NotSupportedException.class)
    public void findByGroupWithFriend() {
        repository.findByGroupWithFriend("asdf", "asdf");
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void save_new() {
        final String NEW_USERNAME = "test123";
        final String NEW_ABOUT_ME = "about me blah blah";
        JpaPerson person = new JpaPerson();
        person.setUsername(NEW_USERNAME);
        person.setAboutMe(NEW_ABOUT_ME);
        assertThat(person.getEntityId(), is(nullValue()));
        repository.save(person);
        Long newId = person.getEntityId();
        assertThat(newId > 0, is(true));
        JpaPerson newPerson = (JpaPerson) repository.get(newId.toString());
        assertThat(newPerson.getAboutMe(), is(NEW_ABOUT_ME));
        assertThat(newPerson.getUsername(), is(NEW_USERNAME));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void save_existing() {
        final String UPDATED_ABOUT_ME = "updated about me";
        Person person = repository.get(VALID_ID);
        assertThat(person.getAboutMe(), is(not(UPDATED_ABOUT_ME)));
        person.setAboutMe(UPDATED_ABOUT_ME);
        repository.save(person);
        assertThat(repository.get(VALID_ID).getAboutMe(), is(UPDATED_ABOUT_ME));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void delete_jpaObject() {
        Person person = repository.get(VALID_ID);
        assertThat(person, is(notNullValue()));
        repository.delete(person);
        person = repository.get(VALID_ID);
        assertThat(person, is(nullValue()));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void delete_implObject() {
        Person person = repository.get(VALID_ID);
        assertThat(person, is(notNullValue()));
        PersonImpl impl = new PersonImpl();
        impl.setUsername(person.getUsername());
        repository.delete(impl);
        person = repository.get(VALID_ID);
        assertThat(person, is(nullValue()));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void addFriend() {
        repository.addFriend(VALID_USER5, VALID_USER);
        List<Person> friendRequestsSent = repository.findFriendRequestsSent(VALID_USER);
        assertThat(friendRequestsSent.size(), is(equalTo(1)));
        assertThat(friendRequestsSent.get(0).getUsername(), is(equalTo(VALID_USER5)));
        List<Person> friendRequestsReceived = repository.findFriendRequestsReceived(VALID_USER5);
        assertThat(friendRequestsReceived.size(), is(equalTo(1)));
        assertThat(friendRequestsReceived.get(0).getUsername(), is(equalTo(VALID_USER)));
        // Checking user5 has only received the friend request from user(canonical)
        // and user(canonical) is not added to his friend list
        List<Person> friends = repository.findFriends(VALID_USER5);
        assertThat(friends.size(), is(equalTo(1)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void removeFriend() {
        List<Person> friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(3)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friends.get(2).getUsername(), is(equalTo(VALID_USER4)));
        repository.removeFriend(VALID_USER4, VALID_USER);
        friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(2)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER3)));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void findFriendRequestsSent() {
        List<Person> friendRequestsSent = repository.findFriendRequestsSent(VALID_USER);
        assertThat(friendRequestsSent.size(), is(equalTo(0)));
        repository.addFriend(VALID_USER5, VALID_USER);
        friendRequestsSent = repository.findFriendRequestsSent(VALID_USER);
        assertThat(friendRequestsSent.size(), is(equalTo(1)));
        assertThat(friendRequestsSent.get(0).getUsername(), is(equalTo(VALID_USER5)));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void findFriendRequestsReceived() {
        List<Person> friendRequestsReceived = repository.findFriendRequestsReceived(VALID_USER5);
        assertThat(friendRequestsReceived.size(), is(equalTo(0)));
        repository.addFriend(VALID_USER5, VALID_USER);
        friendRequestsReceived = repository.findFriendRequestsReceived(VALID_USER5);
        assertThat(friendRequestsReceived.size(), is(equalTo(1)));
        assertThat(friendRequestsReceived.get(0).getUsername(), is(equalTo(VALID_USER)));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void acceptFriendRequest() {
        List<Person> friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(3)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friends.get(2).getUsername(), is(equalTo(VALID_USER4)));
        friends = repository.findFriends(VALID_USER5);
        assertThat(friends.size(), is(equalTo(1)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        repository.addFriend(VALID_USER5, VALID_USER);
        repository.acceptFriendRequest(VALID_USER, VALID_USER5);
        friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(4)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friends.get(2).getUsername(), is(equalTo(VALID_USER4)));
        assertThat(friends.get(3).getUsername(), is(equalTo(VALID_USER5)));
        friends = repository.findFriends(VALID_USER5);
        assertThat(friends.size(), is(equalTo(2)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER)));
    }

    @Test
    @Transactional(readOnly = false)
    @Rollback(true)
    public void removeFriendsAndRequests() {
        List<Person> friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(3)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER2)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friends.get(2).getUsername(), is(equalTo(VALID_USER4)));
        repository.removeAllFriendsAndRequests(VALID_ID2);
        friends = repository.findFriends(VALID_USER);
        assertThat(friends.size(), is(equalTo(2)));
        assertThat(friends.get(0).getUsername(), is(equalTo(VALID_USER3)));
        assertThat(friends.get(1).getUsername(), is(equalTo(VALID_USER4)));
        List<Person> friendsUser2 = repository.findFriends(VALID_USER2);
        assertThat(friendsUser2.size(), is(equalTo(0)));
    }

    @Test
    public void read_properties() {
        Person person = repository.get(VALID_ID);
        assertThat(person.getProperties().size(), is(1));
    }
}
/* * Copyright (c) 2010-2017 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.test; import com.evolveum.icf.dummy.resource.DummyGroup; import com.evolveum.icf.dummy.resource.ScriptHistoryEntry; import com.evolveum.midpoint.common.refinery.RefinedResourceSchema; import com.evolveum.midpoint.common.refinery.RefinedResourceSchemaImpl; import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.match.MatchingRule; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.prism.query.AndFilter; import com.evolveum.midpoint.prism.query.EqualFilter; import com.evolveum.midpoint.prism.query.ObjectFilter; import com.evolveum.midpoint.prism.query.ObjectQuery; import com.evolveum.midpoint.prism.query.RefFilter; import com.evolveum.midpoint.prism.query.builder.QueryBuilder; import com.evolveum.midpoint.prism.schema.PrismSchema; import com.evolveum.midpoint.prism.schema.PrismSchemaImpl; import com.evolveum.midpoint.prism.util.PrismAsserts; import com.evolveum.midpoint.prism.util.PrismTestUtil; import com.evolveum.midpoint.prism.util.PrismUtil; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.repo.cache.RepositoryCache; import com.evolveum.midpoint.schema.SearchResultList; import com.evolveum.midpoint.schema.constants.ConnectorTestOperation; import 
com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.processor.ObjectClassComplexTypeDefinition; import com.evolveum.midpoint.schema.processor.ResourceAttribute; import com.evolveum.midpoint.schema.processor.ResourceAttributeContainer; import com.evolveum.midpoint.schema.processor.ResourceAttributeContainerDefinition; import com.evolveum.midpoint.schema.processor.ResourceAttributeDefinition; import com.evolveum.midpoint.schema.processor.ResourceSchema; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.result.OperationResultStatus; import com.evolveum.midpoint.schema.util.ConnectorTypeUtil; import com.evolveum.midpoint.schema.util.ObjectTypeUtil; import com.evolveum.midpoint.schema.util.ShadowUtil; import com.evolveum.midpoint.schema.util.ResourceTypeUtil; import com.evolveum.midpoint.schema.util.SchemaDebugUtil; import com.evolveum.midpoint.schema.util.SchemaTestConstants; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.util.DOMUtil; import com.evolveum.midpoint.util.DebugDumpable; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.util.exception.*; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import com.evolveum.prism.xml.ns._public.types_3.PolyStringType; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.exception.ExceptionUtils; import org.opends.server.types.Entry; import org.opends.server.types.SearchResultEntry; import org.testng.AssertJUnit; import org.w3c.dom.Element; import javax.xml.namespace.QName; import java.io.IOException; import java.io.RandomAccessFile; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import 
java.util.List; import java.util.stream.Collectors; import static org.testng.AssertJUnit.*; /** * @author Radovan Semancik * */ public class IntegrationTestTools { // Constants from test-config.xml. public static final String CONST_USELESS = "xUSEless"; public static final String CONST_DRINK = "rum"; public static final String CONST_BLABLA = "Bla bla bla"; public static final String DUMMY_CONNECTOR_TYPE = "com.evolveum.icf.dummy.connector.DummyConnector"; public static final String DBTABLE_CONNECTOR_TYPE = "org.identityconnectors.databasetable.DatabaseTableConnector"; public static final String CONNECTOR_LDAP_TYPE = "com.evolveum.polygon.connector.ldap.LdapConnector"; public static final String LDAP_CONNECTOR_TYPE = "com.evolveum.polygon.connector.ldap.LdapConnector"; public static final String NS_RESOURCE_DUMMY_CONFIGURATION = "http://midpoint.evolveum.com/xml/ns/public/connector/icf-1/bundle/com.evolveum.icf.dummy/com.evolveum.icf.dummy.connector.DummyConnector"; public static final QName RESOURCE_DUMMY_CONFIGURATION_USELESS_STRING_ELEMENT_NAME = new QName(NS_RESOURCE_DUMMY_CONFIGURATION ,"uselessString"); // public and not final - to allow changing it in tests public static Trace LOGGER = TraceManager.getTrace(IntegrationTestTools.class); private static final String OBJECT_TITLE_OUT_PREFIX = "\n*** "; private static final String OBJECT_TITLE_LOG_PREFIX = "*** "; private static final String LOG_MESSAGE_PREFIX = ""; private static final String OBJECT_LIST_SEPARATOR = "---"; private static final long WAIT_FOR_LOOP_SLEEP_MILIS = 500; public static OperationResult assertSingleConnectorTestResult(OperationResult testResult) { List<OperationResult> connectorSubresults = getConnectorSubresults(testResult); assertEquals("Unexpected number of connector tests in test result", 1, connectorSubresults.size()); return connectorSubresults.get(0); } private static List<OperationResult> getConnectorSubresults(OperationResult testResult) { return 
testResult.getSubresults().stream().filter(r -> r.getOperation().equals(ConnectorTestOperation.CONNECTOR_TEST.getOperation())).collect(Collectors.toList()); } public static void assertTestResourceSuccess(OperationResult testResult, ConnectorTestOperation operation) { OperationResult opResult = testResult.findSubresult(operation.getOperation()); assertNotNull("No result for "+operation, opResult); TestUtil.assertSuccess("Test resource failed (result): "+operation, opResult, 1); } public static void assertTestResourceFailure(OperationResult testResult, ConnectorTestOperation operation) { OperationResult opResult = testResult.findSubresult(operation.getOperation()); assertNotNull("No result for "+operation, opResult); TestUtil.assertFailure("Test resource succeeded while expected failure (result): "+operation, opResult); } public static void assertTestResourceNotApplicable(OperationResult testResult, ConnectorTestOperation operation) { OperationResult opResult = testResult.findSubresult(operation.getOperation()); assertNotNull("No result for "+operation, opResult); assertEquals("Test resource status is not 'not applicable', it is "+opResult.getStatus()+": "+operation, OperationResultStatus.NOT_APPLICABLE, opResult.getStatus()); } public static void assertNotEmpty(String message, String s) { assertNotNull(message, s); assertFalse(message, s.isEmpty()); } public static void assertNotEmpty(PolyString ps) { assertNotNull(ps); assertFalse(ps.isEmpty()); } public static void assertNotEmpty(PolyStringType ps) { assertNotNull(ps); assertFalse(PrismUtil.isEmpty(ps)); } public static void assertNotEmpty(String message, PolyString ps) { assertNotNull(message, ps); assertFalse(message, ps.isEmpty()); } public static void assertNotEmpty(String message, PolyStringType ps) { assertNotNull(message, ps); assertFalse(message, PrismUtil.isEmpty(ps)); } public static void assertNotEmpty(String s) { assertNotNull(s); assertFalse(s.isEmpty()); } public static void assertNotEmpty(String 
message, QName qname) { assertNotNull(message, qname); assertNotEmpty(message,qname.getNamespaceURI()); assertNotEmpty(message,qname.getLocalPart()); } public static void assertNotEmpty(QName qname) { assertNotNull(qname); assertNotEmpty(qname.getNamespaceURI()); assertNotEmpty(qname.getLocalPart()); } public static <T> void assertAttribute(ShadowType shadow, ResourceType resource, String name, T... expectedValues) { assertAttribute("Wrong attribute " + name + " in "+shadow, shadow, new QName(ResourceTypeUtil.getResourceNamespace(resource), name), expectedValues); } public static <T> void assertAttribute(PrismObject<? extends ShadowType> shadow, ResourceType resource, String name, T... expectedValues) { assertAttribute("Wrong attribute " + name + " in "+shadow, shadow, new QName(ResourceTypeUtil.getResourceNamespace(resource), name), expectedValues); } public static <T> void assertAttribute(ShadowType shadowType, QName name, T... expectedValues) { assertAttribute(shadowType.asPrismObject(), name, expectedValues); } public static <T> void assertAttribute(PrismObject<? extends ShadowType> shadow, QName name, T... expectedValues) { Collection<T> values = getAttributeValues(shadow, name); assertEqualsCollection("Wrong value for attribute "+name+" in "+shadow, expectedValues, values); } public static <T> void assertAttribute(String message, ShadowType repoShadow, QName name, T... expectedValues) { Collection<T> values = getAttributeValues(repoShadow, name); assertEqualsCollection(message, expectedValues, values); } public static <T> void assertAttribute(String message, PrismObject<? extends ShadowType> repoShadow, QName name, T... expectedValues) { Collection<T> values = getAttributeValues(repoShadow, name); assertEqualsCollection(message, expectedValues, values); } public static <T> void assertNoAttribute(PrismObject<? 
extends ShadowType> shadow, QName name) { assertNull("Found attribute "+name+" in "+shadow+" while not expecting it", getAttributeValues(shadow, name)); } public static <T> void assertEqualsCollection(String message, Collection<T> expectedValues, Collection<T> actualValues) { if (expectedValues == null && actualValues == null) { return; } assert !(expectedValues == null && actualValues != null) : "Expecting null values but got "+actualValues; assert actualValues != null : message+": Expecting "+expectedValues+" but got null"; assertEquals(message+": Wrong number of values in " + actualValues, expectedValues.size(), actualValues.size()); for (T actualValue: actualValues) { boolean found = false; for (T value: expectedValues) { if (value.equals(actualValue)) { found = true; } } if (!found) { fail(message + ": Unexpected value "+actualValue+"; expected "+expectedValues+"; has "+actualValues); } } } public static <T> void assertEqualsCollection(String message, Collection<T> expectedValues, T[] actualValues) { assertEqualsCollection(message, expectedValues, Arrays.asList(actualValues)); } public static <T> void assertEqualsCollection(String message, T[] expectedValues, Collection<T> actualValues) { assertEqualsCollection(message, Arrays.asList(expectedValues), actualValues); } public static String getIcfsNameAttribute(PrismObject<ShadowType> shadow) { return getIcfsNameAttribute(shadow.asObjectable()); } public static String getIcfsNameAttribute(ShadowType shadowType) { return getAttributeValue(shadowType, SchemaTestConstants.ICFS_NAME); } public static String getSecondaryIdentifier(PrismObject<ShadowType> shadow) { Collection<ResourceAttribute<?>> secondaryIdentifiers = ShadowUtil.getSecondaryIdentifiers(shadow); if (secondaryIdentifiers == null || secondaryIdentifiers.isEmpty()) { return null; } if (secondaryIdentifiers.size() > 1) { throw new IllegalArgumentException("Too many secondary indentifiers in "+shadow); } return (String) 
secondaryIdentifiers.iterator().next().getRealValue(); } public static void assertSecondaryIdentifier(PrismObject<ShadowType> repoShadow, String value) { assertEquals("Wrong secondary indetifier in "+repoShadow, value, getSecondaryIdentifier(repoShadow)); } public static void assertIcfsNameAttribute(ShadowType repoShadow, String value) { assertAttribute(repoShadow, SchemaTestConstants.ICFS_NAME, value); } public static void assertIcfsNameAttribute(PrismObject<ShadowType> repoShadow, String value) { assertAttribute(repoShadow, SchemaTestConstants.ICFS_NAME, value); } public static void assertAttributeNotNull(PrismObject<ShadowType> repoShadow, QName name) { Collection<String> values = getAttributeValues(repoShadow, name); assertFalse("No values for "+name+" in "+repoShadow, values == null || values.isEmpty()); assertEquals(1, values.size()); assertNotNull(values.iterator().next()); } public static void assertAttributeNotNull(ShadowType repoShadow, QName name) { Collection<String> values = getAttributeValues(repoShadow, name); assertFalse("No values for "+name+" in "+repoShadow, values == null || values.isEmpty()); assertEquals(1, values.size()); assertNotNull(values.iterator().next()); } public static void assertAttributeNotNull(String message, ShadowType repoShadow, QName name) { Collection<String> values = getAttributeValues(repoShadow, name); assertFalse("No values for "+name+" in "+repoShadow, values == null || values.isEmpty()); assertEquals(message, 1, values.size()); assertNotNull(message, values.iterator().next()); } public static void assertAttributeDefinition(ResourceAttribute<?> attr, QName expectedType, int minOccurs, int maxOccurs, boolean canRead, boolean canCreate, boolean canUpdate, Class<?> expectedAttributeDefinitionClass) { ResourceAttributeDefinition definition = attr.getDefinition(); QName attrName = attr.getElementName(); assertNotNull("No definition for attribute "+attrName, definition); //assertEquals("Wrong class of definition for 
attribute"+attrName, expetcedAttributeDefinitionClass, definition.getClass()); assertTrue("Wrong class of definition for attribute"+attrName+" (expected: " + expectedAttributeDefinitionClass + ", real: " + definition.getClass() + ")", expectedAttributeDefinitionClass.isAssignableFrom(definition.getClass())); assertEquals("Wrong type in definition for attribute"+attrName, expectedType, definition.getTypeName()); assertEquals("Wrong minOccurs in definition for attribute"+attrName, minOccurs, definition.getMinOccurs()); assertEquals("Wrong maxOccurs in definition for attribute"+attrName, maxOccurs, definition.getMaxOccurs()); assertEquals("Wrong canRead in definition for attribute"+attrName, canRead, definition.canRead()); assertEquals("Wrong canCreate in definition for attribute"+attrName, canCreate, definition.canAdd()); assertEquals("Wrong canUpdate in definition for attribute"+attrName, canUpdate, definition.canModify()); } public static void assertProvisioningAccountShadow(PrismObject<ShadowType> account, ResourceType resourceType, Class<?> expetcedAttributeDefinitionClass) { assertProvisioningShadow(account,resourceType,expetcedAttributeDefinitionClass, new QName(ResourceTypeUtil.getResourceNamespace(resourceType), SchemaTestConstants.ICF_ACCOUNT_OBJECT_CLASS_LOCAL_NAME)); } public static void assertProvisioningShadow(PrismObject<ShadowType> account, ResourceType resourceType, Class<?> expetcedAttributeDefinitionClass, QName objectClass) { // Check attribute definition PrismContainer attributesContainer = account.findContainer(ShadowType.F_ATTRIBUTES); assertEquals("Wrong attributes container class", ResourceAttributeContainer.class, attributesContainer.getClass()); ResourceAttributeContainer rAttributesContainer = (ResourceAttributeContainer)attributesContainer; PrismContainerDefinition attrsDef = attributesContainer.getDefinition(); assertNotNull("No attributes container definition", attrsDef); assertTrue("Wrong attributes definition class 
"+attrsDef.getClass().getName(), attrsDef instanceof ResourceAttributeContainerDefinition); ResourceAttributeContainerDefinition rAttrsDef = (ResourceAttributeContainerDefinition)attrsDef; ObjectClassComplexTypeDefinition objectClassDef = rAttrsDef.getComplexTypeDefinition(); assertNotNull("No object class definition in attributes definition", objectClassDef); assertEquals("Wrong object class in attributes definition", objectClass, objectClassDef.getTypeName()); ResourceAttributeDefinition primaryIdDef = objectClassDef.getPrimaryIdentifiers().iterator().next(); ResourceAttribute<?> primaryIdAttr = rAttributesContainer.findAttribute(primaryIdDef.getName()); assertNotNull("No primary ID "+primaryIdDef.getName()+" in "+account, primaryIdAttr); assertAttributeDefinition(primaryIdAttr, DOMUtil.XSD_STRING, 0, 1, true, false, false, expetcedAttributeDefinitionClass); ResourceAttributeDefinition secondaryIdDef = objectClassDef.getSecondaryIdentifiers().iterator().next(); ResourceAttribute<Object> secondaryIdAttr = rAttributesContainer.findAttribute(secondaryIdDef.getName()); assertNotNull("No secondary ID "+secondaryIdDef.getName()+" in "+account, secondaryIdAttr); assertAttributeDefinition(secondaryIdAttr, DOMUtil.XSD_STRING, 1, 1, true, true, true, expetcedAttributeDefinitionClass); } public static <T> Collection<T> getAttributeValues(ShadowType shadowType, QName name) { return getAttributeValues(shadowType.asPrismObject(), name); } public static <T> Collection<T> getAttributeValues(PrismObject<? 
extends ShadowType> shadow, QName name) { if (shadow == null) { throw new IllegalArgumentException("No shadow"); } PrismContainer<?> attrCont = shadow.findContainer(ShadowType.F_ATTRIBUTES); if (attrCont == null) { return null; } PrismProperty<T> attrProp = attrCont.findProperty(name); if (attrProp == null) { return null; } return attrProp.getRealValues(); } public static String getAttributeValue(ShadowType repoShadow, QName name) { Collection<String> values = getAttributeValues(repoShadow, name); if (values == null || values.isEmpty()) { AssertJUnit.fail("Attribute "+name+" not found in shadow "+ObjectTypeUtil.toShortString(repoShadow)); } if (values.size() > 1) { AssertJUnit.fail("Too many values for attribute "+name+" in shadow "+ObjectTypeUtil.toShortString(repoShadow)); } return values.iterator().next(); } public static void waitFor(String message, Checker checker, int timeoutInterval) throws CommonException { waitFor(message, checker, timeoutInterval, WAIT_FOR_LOOP_SLEEP_MILIS); } public static void waitFor(String message, Checker checker, int timeoutInterval, long sleepInterval) throws CommonException { System.out.println(message); LOGGER.debug(LOG_MESSAGE_PREFIX + message); long startTime = System.currentTimeMillis(); while (System.currentTimeMillis() < startTime + timeoutInterval) { boolean done = checker.check(); if (done) { System.out.println("... done"); LOGGER.trace(LOG_MESSAGE_PREFIX + "... 
done " + message); return; } try { Thread.sleep(sleepInterval); } catch (InterruptedException e) { LOGGER.warn("Sleep interrupted: {}", e.getMessage(), e); } } // we have timeout System.out.println("Timeout while "+message); LOGGER.error(LOG_MESSAGE_PREFIX + "Timeout while " + message); // Invoke callback checker.timeout(); throw new RuntimeException("Timeout while "+message); } public static void displayJaxb(String title, Object o, QName defaultElementName) throws SchemaException { String serialized = PrismTestUtil.serializeAnyData(o, defaultElementName); System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(serialized); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + serialized); } public static void display(String message) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); } public static void display(String message, SearchResultEntry response) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); display(response); } public static void display(Entry response) { System.out.println(response == null ? "null" : response.toLDIFString()); LOGGER.debug(response == null ? 
"null" : response.toLDIFString()); } public static void display(String message, Task task) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); System.out.println(task.debugDump()); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message + "\n" + task.debugDump()); } public static void display(String message, ObjectType o) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); System.out.println(ObjectTypeUtil.dump(o)); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message + "\n" + ObjectTypeUtil.dump(o)); } public static void display(String message, Collection collection) { String dump = DebugUtil.dump(collection); System.out.println(OBJECT_TITLE_OUT_PREFIX + message + "\n" + dump); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message + "\n" + dump); } public static void display(String title, Entry entry) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); String ldif = null; if (entry != null) { ldif = entry.toLDIFString(); } System.out.println(ldif); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + ldif); } public static void display(String message, PrismContainer<?> propertyContainer) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); System.out.println(propertyContainer == null ? "null" : propertyContainer.debugDump()); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message + "\n" + (propertyContainer == null ? 
"null" : propertyContainer.debugDump())); } public static void display(OperationResult result) { display("Result of "+result.getOperation(), result); } public static void display(String title, OperationResult result) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(result.debugDump()); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + result.debugDump()); } public static void display(String title, OperationResultType result) throws SchemaException { displayJaxb(title, result, SchemaConstants.C_RESULT); } public static void display(String title, List<Element> elements) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title); for(Element e : elements) { String s = DOMUtil.serializeDOMToString(e); System.out.println(s); LOGGER.debug(s); } } public static void display(String title, DebugDumpable dumpable) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(dumpable == null ? "null" : dumpable.debugDump()); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + (dumpable == null ? 
"null" : dumpable.debugDump())); } public static void display(String title, String value) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(value); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + value); } public static void display(String title, Object value) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(SchemaDebugUtil.prettyPrint(value)); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + SchemaDebugUtil.prettyPrint(value)); } public static void display(String title, Containerable value) { System.out.println(OBJECT_TITLE_OUT_PREFIX + title); System.out.println(SchemaDebugUtil.prettyPrint(value.asPrismContainerValue().debugDump())); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + title + "\n" + SchemaDebugUtil.prettyPrint(value.asPrismContainerValue().debugDump())); } public static void display(String title, Throwable e) { String stackTrace = ExceptionUtils.getStackTrace(e); System.out.println(OBJECT_TITLE_OUT_PREFIX + title + ": "+e.getClass() + " " + e.getMessage()); System.out.println(stackTrace); LOGGER.debug("{}{}: {} {}\n{}", new Object[]{ OBJECT_TITLE_LOG_PREFIX, title, e.getClass(), e.getMessage(), stackTrace}); } public static void displayPrismValuesCollection(String message, Collection<? extends PrismValue> collection) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); for (PrismValue v : collection) { System.out.println(DebugUtil.debugDump(v)); LOGGER.debug("{}", DebugUtil.debugDump(v)); System.out.println(OBJECT_LIST_SEPARATOR); LOGGER.debug(OBJECT_LIST_SEPARATOR); } } public static void displayContainerablesCollection(String message, Collection<? 
extends Containerable> collection) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); for (Containerable c : CollectionUtils.emptyIfNull(collection)) { String s = DebugUtil.debugDump(c.asPrismContainerValue()); System.out.println(s); LOGGER.debug("{}", s); System.out.println(OBJECT_LIST_SEPARATOR); LOGGER.debug(OBJECT_LIST_SEPARATOR); } } public static void displayCollection(String message, Collection<? extends DebugDumpable> collection) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); for (DebugDumpable c : CollectionUtils.emptyIfNull(collection)) { String s = DebugUtil.debugDump(c); System.out.println(s); LOGGER.debug("{}", s); System.out.println(OBJECT_LIST_SEPARATOR); LOGGER.debug(OBJECT_LIST_SEPARATOR); } } public static void displayObjectTypeCollection(String message, Collection<? extends ObjectType> collection) { System.out.println(OBJECT_TITLE_OUT_PREFIX + message); LOGGER.debug(OBJECT_TITLE_LOG_PREFIX + message); for (ObjectType o : CollectionUtils.emptyIfNull(collection)) { System.out.println(ObjectTypeUtil.dump(o)); LOGGER.debug(ObjectTypeUtil.dump(o)); System.out.println(OBJECT_LIST_SEPARATOR); LOGGER.debug(OBJECT_LIST_SEPARATOR); } } public static <O extends ObjectType> void assertSearchResultNames(SearchResultList<PrismObject<O>> resultList, MatchingRule<String> matchingRule, String... expectedNames) throws SchemaException { List<String> names = new ArrayList<>(expectedNames.length); for(PrismObject<O> obj: resultList) { names.add(obj.asObjectable().getName().getOrig()); } PrismAsserts.assertSets("Unexpected search result", matchingRule, names, expectedNames); } public static <O extends ObjectType> void assertSearchResultNames(SearchResultList<PrismObject<O>> resultList, String... 
expectedNames) { List<String> names = new ArrayList<>(expectedNames.length); for(PrismObject<O> obj: resultList) { names.add(obj.asObjectable().getName().getOrig()); } PrismAsserts.assertSets("Unexpected search result", names, expectedNames); } public static void checkAllShadows(ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, PrismContext prismContext) throws SchemaException, ObjectNotFoundException, CommunicationException, ConfigurationException { OperationResult result = new OperationResult(IntegrationTestTools.class.getName() + ".checkAllShadows"); ObjectQuery query = createAllShadowsQuery(resourceType, prismContext); List<PrismObject<ShadowType>> allShadows = repositoryService.searchObjects(ShadowType.class, query, null, result); LOGGER.trace("Checking {} shadows, query:\n{}", allShadows.size(), query.debugDump()); for (PrismObject<ShadowType> shadow: allShadows) { checkShadow(shadow.asObjectable(), resourceType, repositoryService, checker, prismContext, result); } } public static ObjectQuery createAllShadowsQuery(ResourceType resourceType, PrismContext prismContext) throws SchemaException { return QueryBuilder.queryFor(ShadowType.class, prismContext) .item(ShadowType.F_RESOURCE_REF).ref(resourceType.getOid()) .build(); } public static ObjectQuery createAllShadowsQuery(ResourceType resourceType, QName objectClass, PrismContext prismContext) throws SchemaException { return QueryBuilder.queryFor(ShadowType.class, prismContext) .item(ShadowType.F_RESOURCE_REF).ref(resourceType.getOid()) .and().item(ShadowType.F_OBJECT_CLASS).eq(objectClass) .build(); } public static ObjectQuery createAllShadowsQuery(ResourceType resourceType, String objectClassLocalName, PrismContext prismContext) throws SchemaException { return createAllShadowsQuery(resourceType, new QName(ResourceTypeUtil.getResourceNamespace(resourceType), objectClassLocalName), prismContext); } public static void checkAccountShadow(ShadowType shadowType, 
ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, PrismContext prismContext, OperationResult parentResult) throws SchemaException { checkAccountShadow(shadowType, resourceType, repositoryService, checker, null, prismContext, parentResult); } public static void checkAccountShadow(ShadowType shadowType, ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, MatchingRule<String> uidMatchingRule, PrismContext prismContext, OperationResult parentResult) throws SchemaException { checkShadow(shadowType, resourceType, repositoryService, checker, uidMatchingRule, prismContext, parentResult); assertEquals(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), SchemaTestConstants.ICF_ACCOUNT_OBJECT_CLASS_LOCAL_NAME), shadowType.getObjectClass()); } public static void checkEntitlementShadow(ShadowType shadowType, ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, String objectClassLocalName, PrismContext prismContext, OperationResult parentResult) throws SchemaException { checkEntitlementShadow(shadowType, resourceType, repositoryService, checker, objectClassLocalName, null, prismContext, parentResult); } public static void checkEntitlementShadow(ShadowType shadowType, ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, String objectClassLocalName, MatchingRule<String> uidMatchingRule, PrismContext prismContext, OperationResult parentResult) throws SchemaException { checkShadow(shadowType, resourceType, repositoryService, checker, uidMatchingRule, prismContext, parentResult); assertEquals(new QName(ResourceTypeUtil.getResourceNamespace(resourceType), objectClassLocalName), shadowType.getObjectClass()); } public static void checkShadow(ShadowType shadowType, ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, PrismContext prismContext, 
OperationResult parentResult) throws SchemaException { checkShadow(shadowType, resourceType, repositoryService, checker, null, prismContext, parentResult); } public static void checkShadow(ShadowType shadowType, ResourceType resourceType, RepositoryService repositoryService, ObjectChecker<ShadowType> checker, MatchingRule<String> uidMatchingRule, PrismContext prismContext, OperationResult parentResult) throws SchemaException { LOGGER.trace("Checking shadow:\n{}",shadowType.asPrismObject().debugDump()); shadowType.asPrismObject().checkConsistence(true, true, ConsistencyCheckScope.THOROUGH); assertNotNull("no OID",shadowType.getOid()); assertNotNull("no name",shadowType.getName()); assertEquals(resourceType.getOid(), shadowType.getResourceRef().getOid()); PrismContainer<?> attrs = shadowType.asPrismObject().findContainer(ShadowType.F_ATTRIBUTES); assertNotNull("no attributes",attrs); assertFalse("empty attributes",attrs.isEmpty()); RefinedResourceSchema rschema = RefinedResourceSchemaImpl.getRefinedSchema(resourceType); ObjectClassComplexTypeDefinition objectClassDef = rschema.findObjectClassDefinition(shadowType); assertNotNull("cannot determine object class for "+shadowType, objectClassDef); String icfUid = ShadowUtil.getSingleStringAttributeValue(shadowType, SchemaTestConstants.ICFS_UID); if (icfUid == null) { Collection<? 
extends ResourceAttributeDefinition> identifierDefs = objectClassDef.getPrimaryIdentifiers(); assertFalse("No identifiers for "+objectClassDef, identifierDefs == null || identifierDefs.isEmpty()); for (ResourceAttributeDefinition idDef: identifierDefs) { String id = ShadowUtil.getSingleStringAttributeValue(shadowType, idDef.getName()); assertNotNull("No identifier "+idDef.getName()+" in "+shadowType, id); } } String resourceOid = ShadowUtil.getResourceOid(shadowType); assertNotNull("No resource OID in "+shadowType, resourceOid); assertNotNull("Null OID in "+shadowType, shadowType.getOid()); PrismObject<ShadowType> repoShadow = null; try { repoShadow = repositoryService.getObject(ShadowType.class, shadowType.getOid(), null, parentResult); } catch (Exception e) { AssertJUnit.fail("Got exception while trying to read "+shadowType+ ": "+e.getCause()+": "+e.getMessage()); } checkShadowUniqueness(shadowType, objectClassDef, repositoryService, uidMatchingRule, prismContext, parentResult); String repoResourceOid = ShadowUtil.getResourceOid(repoShadow.asObjectable()); assertNotNull("No resource OID in the repository shadow "+repoShadow); assertEquals("Resource OID mismatch", resourceOid, repoResourceOid); try { repositoryService.getObject(ResourceType.class, resourceOid, null, parentResult); } catch (Exception e) { AssertJUnit.fail("Got exception while trying to read resource "+resourceOid+" as specified in current shadow "+shadowType+ ": "+e.getCause()+": "+e.getMessage()); } if (checker != null) { checker.check(shadowType); } } /** * Checks i there is only a single shadow in repo for this account. 
*/ private static void checkShadowUniqueness(ShadowType resourceShadow, ObjectClassComplexTypeDefinition objectClassDef, RepositoryService repositoryService, MatchingRule<String> uidMatchingRule, PrismContext prismContext, OperationResult parentResult) { try { ObjectQuery query = createShadowQuery(resourceShadow, objectClassDef, uidMatchingRule, prismContext); List<PrismObject<ShadowType>> results = repositoryService.searchObjects(ShadowType.class, query, null, parentResult); LOGGER.trace("Shadow check with filter\n{}\n found {} objects", query.debugDump(), results.size()); if (results.size() == 0) { AssertJUnit.fail("No shadow found with query:\n"+query.debugDump()); } if (results.size() == 1) { return; } if (results.size() > 1) { for (PrismObject<ShadowType> result: results) { LOGGER.trace("Search result:\n{}", result.debugDump()); } LOGGER.error("More than one shadows found for " + resourceShadow); // TODO: Better error handling later throw new IllegalStateException("More than one shadows found for " + resourceShadow); } } catch (SchemaException e) { throw new SystemException(e); } } private static ObjectQuery createShadowQuery(ShadowType resourceShadow, ObjectClassComplexTypeDefinition objectClassDef, MatchingRule<String> uidMatchingRule, PrismContext prismContext) throws SchemaException { PrismContainer<?> attributesContainer = resourceShadow.asPrismObject().findContainer(ShadowType.F_ATTRIBUTES); QName identifierName = objectClassDef.getPrimaryIdentifiers().iterator().next().getName(); PrismProperty<String> identifier = attributesContainer.findProperty(identifierName); if (identifier == null) { throw new SchemaException("No identifier in "+resourceShadow); } String identifierValue = identifier.getRealValue(); if (uidMatchingRule != null) { identifierValue = uidMatchingRule.normalize(identifierValue); } PrismPropertyDefinition<String> identifierDef = identifier.getDefinition(); return QueryBuilder.queryFor(ShadowType.class, prismContext) 
.item(ShadowType.F_RESOURCE_REF).ref(ShadowUtil.getResourceOid(resourceShadow)) .and().item(new ItemPath(ShadowType.F_ATTRIBUTES, identifierDef.getName()), identifierDef).eq(identifierValue) .build(); } public static void applyResourceSchema(ShadowType accountType, ResourceType resourceType, PrismContext prismContext) throws SchemaException { ResourceSchema resourceSchema = RefinedResourceSchemaImpl.getResourceSchema(resourceType, prismContext); ShadowUtil.applyResourceSchema(accountType.asPrismObject(), resourceSchema); } public static void assertInMessageRecursive(Throwable e, String substring) { assert hasInMessageRecursive(e, substring) : "The substring '"+substring+"' was NOT found in the message of exception "+e+" (including cause exceptions)"; } public static boolean hasInMessageRecursive(Throwable e, String substring) { if (e.getMessage().contains(substring)) { return true; } if (e.getCause() != null) { return hasInMessageRecursive(e.getCause(), substring); } return false; } public static void assertNotInMessageRecursive(Throwable e, String substring) { assert !e.getMessage().contains(substring) : "The substring '"+substring+"' was found in the message of exception "+e+": "+e.getMessage(); if (e.getCause() != null) { assertNotInMessageRecursive(e.getCause(), substring); } } public static void assertNoRepoCache() { if (RepositoryCache.exists()) { AssertJUnit.fail("Cache exists! " + RepositoryCache.debugDump()); } } public static void assertScripts(List<ScriptHistoryEntry> scriptsHistory, ProvisioningScriptSpec... 
expectedScripts) { displayScripts(scriptsHistory); assertEquals("Wrong number of scripts executed", expectedScripts.length, scriptsHistory.size()); Iterator<ScriptHistoryEntry> historyIter = scriptsHistory.iterator(); for (ProvisioningScriptSpec expecedScript: expectedScripts) { ScriptHistoryEntry actualScript = historyIter.next(); assertEquals("Wrong script code", expecedScript.getCode(), actualScript.getCode()); if (expecedScript.getLanguage() == null) { assertEquals("We talk only gibberish here", "Gibberish", actualScript.getLanguage()); } else { assertEquals("Wrong script language", expecedScript.getLanguage(), actualScript.getLanguage()); } assertEquals("Wrong number of arguments", expecedScript.getArgs().size(), actualScript.getParams().size()); for (java.util.Map.Entry<String,Object> expectedEntry: expecedScript.getArgs().entrySet()) { Object expectedValue = expectedEntry.getValue(); Object actualVal = actualScript.getParams().get(expectedEntry.getKey()); assertEquals("Wrong value for argument '"+expectedEntry.getKey()+"'", expectedValue, actualVal); } } } public static void displayScripts(List<ScriptHistoryEntry> scriptsHistory) { for (ScriptHistoryEntry script : scriptsHistory) { display("Script", script); } } public static <T> void assertExtensionProperty(PrismObject<? extends ObjectType> object, QName propertyName, T... expectedValues) { PrismContainer<?> extension = object.getExtension(); PrismAsserts.assertPropertyValue(extension, propertyName, expectedValues); } public static <T> void assertNoExtensionProperty(PrismObject<? 
extends ObjectType> object, QName propertyName) { PrismContainer<?> extension = object.getExtension(); PrismAsserts.assertNoItem(extension, propertyName); } public static void assertIcfResourceSchemaSanity(ResourceSchema resourceSchema, ResourceType resourceType) { assertNotNull("No resource schema in "+resourceType, resourceSchema); QName objectClassQname = new QName(ResourceTypeUtil.getResourceNamespace(resourceType), "AccountObjectClass"); ObjectClassComplexTypeDefinition accountDefinition = resourceSchema.findObjectClassDefinition(objectClassQname); assertNotNull("No object class definition for "+objectClassQname+" in resource schema", accountDefinition); ObjectClassComplexTypeDefinition accountDef = resourceSchema.findDefaultObjectClassDefinition(ShadowKindType.ACCOUNT); assertTrue("Mismatched account definition: "+accountDefinition+" <-> "+accountDef, accountDefinition == accountDef); assertNotNull("No object class definition " + objectClassQname, accountDefinition); assertEquals("Object class " + objectClassQname + " is not account", ShadowKindType.ACCOUNT, accountDefinition.getKind()); assertTrue("Object class " + objectClassQname + " is not default account", accountDefinition.isDefaultInAKind()); assertFalse("Object class " + objectClassQname + " is empty", accountDefinition.isEmpty()); assertFalse("Object class " + objectClassQname + " is empty", accountDefinition.isIgnored()); Collection<? 
extends ResourceAttributeDefinition> identifiers = accountDefinition.getPrimaryIdentifiers(); assertNotNull("Null identifiers for " + objectClassQname, identifiers); assertFalse("Empty identifiers for " + objectClassQname, identifiers.isEmpty()); ResourceAttributeDefinition uidAttributeDefinition = accountDefinition.findAttributeDefinition(SchemaTestConstants.ICFS_UID); assertNotNull("No definition for attribute "+SchemaTestConstants.ICFS_UID, uidAttributeDefinition); assertTrue("Attribute "+SchemaTestConstants.ICFS_UID+" in not an identifier",uidAttributeDefinition.isIdentifier(accountDefinition)); assertTrue("Attribute "+SchemaTestConstants.ICFS_UID+" in not in identifiers list",identifiers.contains(uidAttributeDefinition)); assertEquals("Wrong displayName for attribute "+SchemaTestConstants.ICFS_UID, "ConnId UID", uidAttributeDefinition.getDisplayName()); assertEquals("Wrong displayOrder for attribute "+SchemaTestConstants.ICFS_UID, (Integer)100, uidAttributeDefinition.getDisplayOrder()); Collection<? 
// NOTE(review): this chunk begins INSIDE an assertion method whose signature sits above
// this view; the leading declaration fragment ("Collection<? ...") is kept verbatim.
extends ResourceAttributeDefinition> secondaryIdentifiers = accountDefinition.getSecondaryIdentifiers();
        // The account object class must advertise non-empty secondary identifiers (e.g. icfs:name).
        assertNotNull("Null secondary identifiers for " + objectClassQname, secondaryIdentifiers);
        assertFalse("Empty secondary identifiers for " + objectClassQname, secondaryIdentifiers.isEmpty());
        // icfs:name must exist, be a secondary identifier, and appear in the identifiers list.
        ResourceAttributeDefinition nameAttributeDefinition = accountDefinition.findAttributeDefinition(SchemaTestConstants.ICFS_NAME);
        assertNotNull("No definition for attribute "+SchemaTestConstants.ICFS_NAME, nameAttributeDefinition);
        assertTrue("Attribute "+SchemaTestConstants.ICFS_NAME+" in not an identifier",nameAttributeDefinition.isSecondaryIdentifier(accountDefinition));
        assertTrue("Attribute "+SchemaTestConstants.ICFS_NAME+" in not in identifiers list",secondaryIdentifiers.contains(nameAttributeDefinition));
        // Display metadata for icfs:name as produced by the connector schema processing.
        assertEquals("Wrong displayName for attribute "+SchemaTestConstants.ICFS_NAME, "ConnId Name", nameAttributeDefinition.getDisplayName());
        assertEquals("Wrong displayOrder for attribute "+SchemaTestConstants.ICFS_NAME, (Integer)110, nameAttributeDefinition.getDisplayOrder());
        // Sanity of the account definition as a whole: identifiers, naming attribute, native class.
        assertNotNull("Null identifiers in account", accountDef.getPrimaryIdentifiers());
        assertFalse("Empty identifiers in account", accountDef.getPrimaryIdentifiers().isEmpty());
        assertNotNull("Null secondary identifiers in account", accountDef.getSecondaryIdentifiers());
        assertFalse("Empty secondary identifiers in account", accountDef.getSecondaryIdentifiers().isEmpty());
        assertNotNull("No naming attribute in account", accountDef.getNamingAttribute());
        assertFalse("No nativeObjectClass in account", StringUtils.isEmpty(accountDef.getNativeObjectClass()));
        // icfs:uid: single-valued, optional on create (minOccurs 0), read-only.
        ResourceAttributeDefinition uidDef = accountDef.findAttributeDefinition(SchemaTestConstants.ICFS_UID);
        assertEquals(1, uidDef.getMaxOccurs());
        assertEquals(0, uidDef.getMinOccurs());
        assertFalse("No UID display name", StringUtils.isBlank(uidDef.getDisplayName()));
        assertFalse("UID has create", uidDef.canAdd());
        assertFalse("UID has update",uidDef.canModify());
        assertTrue("No UID read",uidDef.canRead());
        assertTrue("UID definition not in identifiers", accountDef.getPrimaryIdentifiers().contains(uidDef));
        assertEquals("Wrong refined displayName for attribute "+SchemaTestConstants.ICFS_UID, "ConnId UID", uidDef.getDisplayName());
        assertEquals("Wrong refined displayOrder for attribute "+SchemaTestConstants.ICFS_UID, (Integer)100, uidDef.getDisplayOrder());
        // icfs:name: single-valued, mandatory (minOccurs 1), fully writable and readable.
        ResourceAttributeDefinition nameDef = accountDef.findAttributeDefinition(SchemaTestConstants.ICFS_NAME);
        assertEquals(1, nameDef.getMaxOccurs());
        assertEquals(1, nameDef.getMinOccurs());
        assertFalse("No NAME displayName", StringUtils.isBlank(nameDef.getDisplayName()));
        assertTrue("No NAME create", nameDef.canAdd());
        assertTrue("No NAME update",nameDef.canModify());
        assertTrue("No NAME read",nameDef.canRead());
        assertTrue("NAME definition not in identifiers", accountDef.getSecondaryIdentifiers().contains(nameDef));
        assertEquals("Wrong refined displayName for attribute "+SchemaTestConstants.ICFS_NAME, "ConnId Name", nameDef.getDisplayName());
        assertEquals("Wrong refined displayOrder for attribute "+SchemaTestConstants.ICFS_NAME, (Integer)110, nameDef.getDisplayOrder());
        // Password must NOT leak into the attribute schema (it is handled as a credential).
        assertNull("The _PASSSWORD_ attribute sneaked into schema", accountDef.findAttributeDefinition(new QName(SchemaTestConstants.NS_ICFS,"password")));
    }

    //TODO: add language parameter..for now, use xml serialization
    /**
     * Serializes the given prism object to XML and displays it under the given message.
     */
    public static void displayXml(String message, PrismObject<? extends ObjectType> object) throws SchemaException {
        String xml = PrismTestUtil.serializeObjectToString(object, PrismContext.LANG_XML);
        display(message, xml);
    }

    /**
     * Builds an ADD-container delta that associates the account shadow with the given group
     * (entitlement) shadow under the given association name.
     */
    public static ObjectDelta<ShadowType> createEntitleDelta(String accountOid, QName associationName, String groupOid, PrismContext prismContext) throws SchemaException {
        ShadowAssociationType association = new ShadowAssociationType();
        association.setName(associationName);
        ObjectReferenceType shadowRefType = new ObjectReferenceType();
        shadowRefType.setOid(groupOid);
        shadowRefType.setType(ShadowType.COMPLEX_TYPE);
        association.setShadowRef(shadowRefType);
        ItemPath entitlementAssociationPath = new ItemPath(ShadowType.F_ASSOCIATION);
        ObjectDelta<ShadowType> delta = ObjectDelta.createModificationAddContainer(ShadowType.class, accountOid, entitlementAssociationPath, prismContext, association);
        return delta;
    }

    /**
     * Builds a DELETE-container delta that removes the association between the account shadow
     * and the given group (entitlement) shadow. Mirror image of createEntitleDelta().
     */
    public static ObjectDelta<ShadowType> createDetitleDelta(String accountOid, QName associationName, String groupOid, PrismContext prismContext) throws SchemaException {
        ShadowAssociationType association = new ShadowAssociationType();
        association.setName(associationName);
        ObjectReferenceType shadowRefType = new ObjectReferenceType();
        shadowRefType.setOid(groupOid);
        shadowRefType.setType(ShadowType.COMPLEX_TYPE);
        association.setShadowRef(shadowRefType);
        ItemPath entitlementAssociationPath = new ItemPath(ShadowType.F_ASSOCIATION);
        ObjectDelta<ShadowType> delta = ObjectDelta.createModificationDeleteContainer(ShadowType.class, accountOid, entitlementAssociationPath, prismContext, association);
        return delta;
    }

    /** Asserts case-sensitive group membership. */
    public static void assertGroupMember(DummyGroup group, String accountId) {
        assertGroupMember(group, accountId, false);
    }

    /**
     * Asserts that accountId is a member of the dummy group, optionally ignoring case.
     */
    public static void assertGroupMember(DummyGroup group, String accountId, boolean caseIgnore) {
        Collection<String> members = group.getMembers();
        assertNotNull("No members in group "+group.getName()+", expected that "+accountId+" will be there", members);
        if (caseIgnore) {
            // Linear scan because Collection.contains() is case-sensitive.
            for (String member: members) {
                if (StringUtils.equalsIgnoreCase(accountId, member)) {
                    return;
                }
            }
            AssertJUnit.fail("Account "+accountId+" is not member of group "+group.getName()+", members: "+members);
        } else {
            assertTrue("Account "+accountId+" is not member of group "+group.getName()+", members: "+members, members.contains(accountId));
        }
    }

    /** Asserts that accountId is NOT a member of the dummy group (null member list passes). */
    public static void assertNoGroupMember(DummyGroup group, String accountId) {
        Collection<String> members = group.getMembers();
        if (members == null) {
            return;
        }
        assertFalse("Account "+accountId+" IS member of group "+group.getName()+" while not expecting it, members: "+members, members.contains(accountId));
    }

    /** Asserts that the dummy group has no members at all. */
    public static void assertNoGroupMembers(DummyGroup group) {
        Collection<String> members = group.getMembers();
        assertTrue("Group "+group.getName()+" has members while not expecting it, members: "+members, members == null || members.isEmpty());
    }

    /**
     * Asserts that the shadow carries an association with the given name pointing at the
     * given entitlement OID, and returns it.
     */
    public static ShadowAssociationType assertAssociation(PrismObject<ShadowType> shadow, QName associationName, String entitlementOid) {
        ShadowType accountType = shadow.asObjectable();
        List<ShadowAssociationType> associations = accountType.getAssociation();
        assertNotNull("Null associations in "+shadow, associations);
        assertFalse("Empty associations in "+shadow, associations.isEmpty());
        for (ShadowAssociationType association: associations) {
            if (associationName.equals(association.getName()) && association.getShadowRef() != null && entitlementOid.equals(association.getShadowRef().getOid())) {
                return association;
            }
        }
        AssertJUnit.fail("No association for entitlement "+entitlementOid+" in "+shadow);
        // fail() above always throws; this line only satisfies the compiler's return analysis.
        throw new IllegalStateException("not reached");
    }

    /** Asserts that no association with the given name/entitlement OID exists on the shadow. */
    public static void assertNoAssociation(PrismObject<ShadowType> shadow, QName associationName, String entitlementOid) {
        ShadowType accountType = shadow.asObjectable();
        List<ShadowAssociationType> associations = accountType.getAssociation();
        if (associations == null) {
            return;
        }
        for (ShadowAssociationType association: associations) {
            if (associationName.equals(association.getName()) && entitlementOid.equals(association.getShadowRef().getOid())) {
                AssertJUnit.fail("Unexpected association for entitlement "+entitlementOid+" in "+shadow);
            }
        }
    }

    /** Asserts that the resource carries no cached XSD schema. */
    public static void assertNoSchema(ResourceType resourceType) {
        assertNoSchema("Found schema in resource "+resourceType+" while not expecting it", resourceType);
    }

    /** Asserts (with a custom message) that the resource carries no cached XSD schema. */
    public static void assertNoSchema(String message, ResourceType resourceType) {
        Element resourceXsdSchema = ResourceTypeUtil.getResourceXsdSchema(resourceType);
        AssertJUnit.assertNull(message, resourceXsdSchema);
    }

    /**
     * Parses the connector's stored XSD schema and runs the structural sanity checks on it.
     */
    public static void assertConnectorSchemaSanity(ConnectorType conn, PrismContext prismContext) throws SchemaException {
        XmlSchemaType xmlSchemaType = conn.getSchema();
        assertNotNull("xmlSchemaType is null",xmlSchemaType);
        Element connectorXsdSchemaElement = ConnectorTypeUtil.getConnectorXsdSchema(conn);
        assertNotNull("No schema", connectorXsdSchemaElement);
        Element xsdElement = ObjectTypeUtil.findXsdElement(xmlSchemaType);
        assertNotNull("No xsd:schema element in xmlSchemaType",xsdElement);
        display("XSD schema of "+conn, DOMUtil.serializeDOMToString(xsdElement));
        // Try to parse the schema
        PrismSchema schema = null;
        try {
            schema = PrismSchemaImpl.parse(xsdElement, true, "schema of "+conn, prismContext);
        } catch (SchemaException e) {
            // Re-wrap to add the connector identity to the error message; cause is preserved.
            throw new SchemaException("Error parsing schema of "+conn+": "+e.getMessage(),e);
        }
        assertConnectorSchemaSanity(schema, conn.toString(), SchemaConstants.ICF_FRAMEWORK_URI.equals(conn.getFramework()));
    }

    /**
     * Structural sanity checks on an already-parsed connector schema. If expectConnIdSchema
     * is true, the ConnId-specific <configurationProperties> container is checked as well.
     */
    public static void assertConnectorSchemaSanity(PrismSchema schema, String connectorDescription, boolean expectConnIdSchema) {
        assertNotNull("Cannot parse connector schema of "+connectorDescription,schema);
        assertFalse("Empty connector schema in "+connectorDescription,schema.isEmpty());
        display("Parsed connector schema of "+connectorDescription,schema);
        // Local schema namespace is used here.
        PrismContainerDefinition configurationDefinition = schema.findItemDefinition(ResourceType.F_CONNECTOR_CONFIGURATION.getLocalPart(), PrismContainerDefinition.class);
        assertNotNull("Definition of <configuration> property container not found in connector schema of "+connectorDescription, configurationDefinition);
        assertFalse("Empty definition of <configuration> property container in connector schema of "+connectorDescription, configurationDefinition.isEmpty());
        if (expectConnIdSchema) {
            // ICFC schema is used on other elements
            PrismContainerDefinition configurationPropertiesDefinition = configurationDefinition.findContainerDefinition(SchemaConstants.CONNECTOR_SCHEMA_CONFIGURATION_PROPERTIES_ELEMENT_QNAME);
            assertNotNull("Definition of <configurationProperties> property container not found in connector schema of "+connectorDescription, configurationPropertiesDefinition);
            assertFalse("Empty definition of <configurationProperties> property container in connector schema of "+connectorDescription, configurationPropertiesDefinition.isEmpty());
            assertFalse("No definitions in <configurationProperties> in "+connectorDescription, configurationPropertiesDefinition.getDefinitions().isEmpty());
            // TODO: other elements
        }
    }

    /** Truncates target/test.log to zero length so a test can assert on fresh log output. */
    public static void clearLog() throws IOException {
        RandomAccessFile file = new RandomAccessFile("target/test.log", "rw");
        file.setLength(0);
        file.close();
        System.out.println("Log cleared.");
    }
}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.media; import android.graphics.Bitmap; import android.graphics.PointF; import android.util.Log; import java.lang.IllegalArgumentException; /** * Identifies the faces of people in a * {@link android.graphics.Bitmap} graphic object. */ public class FaceDetector { /** * A Face contains all the information identifying the location * of a face in a bitmap. */ public class Face { /** The minimum confidence factor of good face recognition */ public static final float CONFIDENCE_THRESHOLD = 0.4f; /** The x-axis Euler angle of a face. */ public static final int EULER_X = 0; /** The y-axis Euler angle of a face. */ public static final int EULER_Y = 1; /** The z-axis Euler angle of a face. */ public static final int EULER_Z = 2; /** * Returns a confidence factor between 0 and 1. This indicates how * certain what has been found is actually a face. A confidence * factor above 0.3 is usually good enough. */ public float confidence() { return mConfidence; } /** * Sets the position of the mid-point between the eyes. * @param point the PointF coordinates (float values) of the * face's mid-point */ public void getMidPoint(PointF point) { // don't return a PointF to avoid allocations point.set(mMidPointX, mMidPointY); } /** * Returns the distance between the eyes. */ public float eyesDistance() { return mEyesDist; } /** * Returns the face's pose. 
That is, the rotations around either * the X, Y or Z axis (the positions in 3-dimensional Euclidean space). * * @param euler the Euler axis to retrieve an angle from * (<var>EULER_X</var>, <var>EULER_Y</var> or * <var>EULER_Z</var>) * @return the Euler angle of the of the face, for the given axis */ public float pose(int euler) { // don't use an array to avoid allocations if (euler == EULER_X) return mPoseEulerX; else if (euler == EULER_Y) return mPoseEulerY; else if (euler == EULER_Z) return mPoseEulerZ; throw new IllegalArgumentException(); } // private ctor, user not supposed to build this object private Face() { } private float mConfidence; private float mMidPointX; private float mMidPointY; private float mEyesDist; private float mPoseEulerX; private float mPoseEulerY; private float mPoseEulerZ; } /** * Creates a FaceDetector, configured with the size of the images to * be analysed and the maximum number of faces that can be detected. * These parameters cannot be changed once the object is constructed. * Note that the width of the image must be even. * * @param width the width of the image * @param height the height of the image * @param maxFaces the maximum number of faces to identify * */ public FaceDetector(int width, int height, int maxFaces) { if (!sInitialized) { return; } fft_initialize(width, height, maxFaces); mWidth = width; mHeight = height; mMaxFaces = maxFaces; mBWBuffer = new byte[width * height]; } /** * Finds all the faces found in a given {@link android.graphics.Bitmap}. * The supplied array is populated with {@link FaceDetector.Face}s for each * face found. The bitmap must be in 565 format (for now). * * @param bitmap the {@link android.graphics.Bitmap} graphic to be analyzed * @param faces an array in which to place all found * {@link FaceDetector.Face}s. 
The array must be sized equal * to the <var>maxFaces</var> value set at initialization * @return the number of faces found * @throws IllegalArgumentException if the Bitmap dimensions don't match * the dimensions defined at initialization or the given array * is not sized equal to the <var>maxFaces</var> value defined * at initialization */ public int findFaces(Bitmap bitmap, Face[] faces) { if (!sInitialized) { return 0; } if (bitmap.getWidth() != mWidth || bitmap.getHeight() != mHeight) { throw new IllegalArgumentException( "bitmap size doesn't match initialization"); } if (faces.length < mMaxFaces) { throw new IllegalArgumentException( "faces[] smaller than maxFaces"); } int numFaces = fft_detect(bitmap); if (numFaces >= mMaxFaces) numFaces = mMaxFaces; for (int i=0 ; i<numFaces ; i++) { if (faces[i] == null) faces[i] = new Face(); fft_get_face(faces[i], i); } return numFaces; } /* no user serviceable parts here ... */ @Override protected void finalize() throws Throwable { fft_destroy(); } /* * We use a class initializer to allow the native code to cache some * field offsets. */ private static boolean sInitialized; native private static void nativeClassInit(); static { sInitialized = false; try { System.loadLibrary("FFTEm"); nativeClassInit(); sInitialized = true; } catch (UnsatisfiedLinkError e) { Log.d("FFTEm", "face detection library not found!"); } } native private int fft_initialize(int width, int height, int maxFaces); native private int fft_detect(Bitmap bitmap); native private void fft_get_face(Face face, int i); native private void fft_destroy(); private int mFD; private int mSDK; private int mDCR; private int mWidth; private int mHeight; private int mMaxFaces; private byte mBWBuffer[]; }
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.06.11 at 05:34:27 PM UYT
//

package dgi.classes.entreEmpresas;

import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;

/**
 * <p>Java class for the IdDoc_Fact complex type (JAXB-generated; do not edit by hand).
 *
 * <p>Schema summary (see the source XSD fragment for the authoritative definition):
 * <ul>
 *   <li>TipoCFE — restricted to the enumeration 111, 112, 113, 141, 142, 143,
 *       211, 212, 213, 241, 242, 243 (base {@code CFEType})</li>
 *   <li>Serie ({@code SerieType}), Nro ({@code NroCFEType}), FchEmis ({@code FechaType}) — required</li>
 *   <li>PeriodoDesde / PeriodoHasta ({@code Fecha_d2000_Type}) — optional</li>
 *   <li>MntBruto — optional; the only schema-legal value is 1</li>
 *   <li>FmaPago — required; restricted to the enumeration 1 or 2</li>
 *   <li>FchVenc ({@code Fecha_d2000_Type}), InfoAdicionalDoc ({@code InfoAdicionalType}) — optional</li>
 * </ul>
 *
 * <p>All properties follow the JAXB convention: fields are nullable and the
 * getters/setters perform no validation.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "IdDoc_Fact", propOrder = {
    "tipoCFE",
    "serie",
    "nro",
    "fchEmis",
    "periodoDesde",
    "periodoHasta",
    "mntBruto",
    "fmaPago",
    "fchVenc",
    "infoAdicionalDoc"
})
public class IdDocFact {

    @XmlElement(name = "TipoCFE", required = true)
    protected BigInteger tipoCFE;
    @XmlElement(name = "Serie", required = true)
    protected String serie;
    @XmlElement(name = "Nro", required = true)
    protected BigInteger nro;
    @XmlElement(name = "FchEmis", required = true)
    protected XMLGregorianCalendar fchEmis;
    @XmlElement(name = "PeriodoDesde")
    protected XMLGregorianCalendar periodoDesde;
    @XmlElement(name = "PeriodoHasta")
    protected XMLGregorianCalendar periodoHasta;
    @XmlElement(name = "MntBruto")
    protected BigInteger mntBruto;
    @XmlElement(name = "FmaPago", required = true)
    protected BigInteger fmaPago;
    @XmlElement(name = "FchVenc")
    protected XMLGregorianCalendar fchVenc;
    @XmlElement(name = "InfoAdicionalDoc")
    protected String infoAdicionalDoc;

    /** Returns the TipoCFE value, possibly {@code null}. */
    public BigInteger getTipoCFE() {
        return tipoCFE;
    }

    /** Sets the TipoCFE value ({@link BigInteger}, may be {@code null}). */
    public void setTipoCFE(BigInteger value) {
        this.tipoCFE = value;
    }

    /** Returns the Serie value, possibly {@code null}. */
    public String getSerie() {
        return serie;
    }

    /** Sets the Serie value ({@link String}, may be {@code null}). */
    public void setSerie(String value) {
        this.serie = value;
    }

    /** Returns the Nro value, possibly {@code null}. */
    public BigInteger getNro() {
        return nro;
    }

    /** Sets the Nro value ({@link BigInteger}, may be {@code null}). */
    public void setNro(BigInteger value) {
        this.nro = value;
    }

    /** Returns the FchEmis (emission date) value, possibly {@code null}. */
    public XMLGregorianCalendar getFchEmis() {
        return fchEmis;
    }

    /** Sets the FchEmis value ({@link XMLGregorianCalendar}, may be {@code null}). */
    public void setFchEmis(XMLGregorianCalendar value) {
        this.fchEmis = value;
    }

    /** Returns the PeriodoDesde value, possibly {@code null}. */
    public XMLGregorianCalendar getPeriodoDesde() {
        return periodoDesde;
    }

    /** Sets the PeriodoDesde value ({@link XMLGregorianCalendar}, may be {@code null}). */
    public void setPeriodoDesde(XMLGregorianCalendar value) {
        this.periodoDesde = value;
    }

    /** Returns the PeriodoHasta value, possibly {@code null}. */
    public XMLGregorianCalendar getPeriodoHasta() {
        return periodoHasta;
    }

    /** Sets the PeriodoHasta value ({@link XMLGregorianCalendar}, may be {@code null}). */
    public void setPeriodoHasta(XMLGregorianCalendar value) {
        this.periodoHasta = value;
    }

    /** Returns the MntBruto value, possibly {@code null}. */
    public BigInteger getMntBruto() {
        return mntBruto;
    }

    /** Sets the MntBruto value ({@link BigInteger}, may be {@code null}). */
    public void setMntBruto(BigInteger value) {
        this.mntBruto = value;
    }

    /** Returns the FmaPago value, possibly {@code null}. */
    public BigInteger getFmaPago() {
        return fmaPago;
    }

    /** Sets the FmaPago value ({@link BigInteger}, may be {@code null}). */
    public void setFmaPago(BigInteger value) {
        this.fmaPago = value;
    }

    /** Returns the FchVenc (due date) value, possibly {@code null}. */
    public XMLGregorianCalendar getFchVenc() {
        return fchVenc;
    }

    /** Sets the FchVenc value ({@link XMLGregorianCalendar}, may be {@code null}). */
    public void setFchVenc(XMLGregorianCalendar value) {
        this.fchVenc = value;
    }

    /** Returns the InfoAdicionalDoc value, possibly {@code null}. */
    public String getInfoAdicionalDoc() {
        return infoAdicionalDoc;
    }

    /** Sets the InfoAdicionalDoc value ({@link String}, may be {@code null}). */
    public void setInfoAdicionalDoc(String value) {
        this.infoAdicionalDoc = value;
    }
}
/*
 * Copyright 2012 Kevin Seim
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.beanio.internal.parser.format.json;

import java.util.List;

import org.beanio.BeanWriterException;
import org.beanio.internal.parser.*;
import org.beanio.internal.parser.format.FieldPadding;
import org.beanio.internal.util.JsonUtil;

/**
 * A {@link FieldFormat} implementation for a field in a JSON formatted record.
 *
 * @author Kevin Seim
 * @since 2.0
 */
public class JsonFieldFormat implements FieldFormat, JsonNode {

    /* the field name from the mapping file */
    private String name;
    /* the jsonName from the mapping file */
    private String jsonName;
    /* the JSON type: boolean, number or string */
    private char jsonType;
    /* whether the field is mapped to an array */
    private boolean jsonArray;
    /* set to the index of this field in a JSON array, or -1 if the field itself repeats */
    private int jsonArrayIndex = -1;
    /* whether the field must be present in the stream (i.e minOccurs > 0) */
    private boolean lazy;
    /* whether the field may be set to 'null' */
    private boolean nillable;
    /* optional field padding */
    private FieldPadding padding;
    /* whether type conversion can be bypassed and the value directly set into the map */
    private boolean bypassTypeHandler;

    /**
     * Constructs a new <tt>JsonFieldFormat</tt>.
     */
    public JsonFieldFormat() { }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#extract(org.beanio.internal.parser.UnmarshallingContext, boolean)
     *
     * Extracts this field's text from the JSON unmarshalling context. Returns
     * null for a missing value, Value.NIL for an explicit JSON null, and
     * Value.INVALID when a type or padding error was recorded.
     */
    @SuppressWarnings("unchecked")
    public String extract(UnmarshallingContext context, boolean reportErrors) {
        JsonUnmarshallingContext ctx = (JsonUnmarshallingContext) context;

        Object value = ctx.getValue(this);

        // nothing more to do with null or missing values
        if (value == null || value == Value.NIL) {
            ctx.setFieldText(getName(), null);
            return (String) value;
        }

        // extract the field from a list if repeating
        if (isJsonArray()) {
            // NOTE(review): jsonArrayIndex is deliberately not used as the starting
            // index here (see original TODO); the relative field index from the
            // context is used instead — confirm before changing.
            int index = -1;// jsonArrayIndex; // TODO is this needed?
            if (index < 0) {
                index = ctx.getRelativeFieldIndex();
            }

            try {
                List<Object> list = (List<Object>) value;
                if (index < list.size()) {
                    value = list.get(index);
                }
                else {
                    return null;
                }
            }
            catch (ClassCastException ex) {
                // if index is greater than zero, we're trying to get next value
                // which doesn't exist if the value isn't a list so return null
                // instead of repetitively reporting the same field error
                if (index > 0 && jsonArrayIndex < 0) {
                    return null;
                }

                String fieldText = value.toString();
                ctx.setFieldText(getName(), fieldText);
                if (reportErrors) {
                    context.addFieldError(getName(), fieldText, "jsontype",
                        JsonNodeUtil.getTypeDescription(jsonType, jsonArray));
                }
                return Value.INVALID;
            }
        }

        // TODO validate JSON type (how should this be configured...?)

        // convert to field text
        String fieldText = value.toString();
        ctx.setFieldText(getName(), fieldText);

        // handle padded fields
        if (padding != null) {
            int length = fieldText.length();
            if (length == 0) {
                // this will either cause a required validation error or map
                // to a null value depending on the value of 'required'
                return "";
            }
            else if (length != padding.getLength()) {
                if (reportErrors) {
                    context.addFieldError(getName(), fieldText, "length", padding.getLength());
                }
                fieldText = Value.INVALID;
            }
            else {
                fieldText = padding.unpad(fieldText);
            }
        }

        return fieldText;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#insertValue(org.beanio.internal.parser.MarshallingContext, java.lang.Object)
     *
     * Directly stores the (already converted) value when type-handler bypass is
     * enabled; returns false otherwise so the caller falls back to insertField().
     */
    public boolean insertValue(MarshallingContext context, Object value) {
        if (!bypassTypeHandler) {
            return false;
        }

        JsonMarshallingContext ctx = (JsonMarshallingContext) context;

        if (value == Value.NIL) {
            ctx.put(this, null);
        }
        else if (value == null && isLazy()) {
            // do nothing — a lazy (optional) field is simply omitted
        }
        else {
            ctx.put(this, value);
        }

        return true;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#insertField(org.beanio.internal.parser.MarshallingContext, java.lang.String)
     *
     * Converts field text to the configured JSON type and stores it in the
     * marshalling context.
     *
     * FIX(review): the boolean branch previously reported "Cannot parse '...'
     * into a JSON number" — copy-paste from the number branch; it now says
     * "JSON boolean".
     */
    public void insertField(MarshallingContext context, String text) {
        JsonMarshallingContext ctx = (JsonMarshallingContext) context;

        // identity comparison against the NIL sentinel is intentional here
        if (text == Value.NIL) {
            ctx.put(this, null);
            return;
        }

        if (text == null && isLazy()) {
            return;
        }

        Object value = null;

        // convert text to JSON type
        switch (jsonType) {
            case JsonNode.BOOLEAN:
                try {
                    value = JsonUtil.toBoolean(text);
                }
                catch (IllegalArgumentException ex) {
                    throw new BeanWriterException("Cannot parse '" + text + "' into a JSON boolean", ex);
                }
                break;

            case JsonNode.NUMBER:
                try {
                    value = JsonUtil.toNumber(text);
                }
                catch (NumberFormatException ex) {
                    throw new BeanWriterException("Cannot parse '" + text + "' into a JSON number", ex);
                }
                break;

            case JsonNode.STRING:
                value = text;
                break;

            default:
                throw new BeanWriterException("Invalid jsonType");
        }

        ctx.put(this, value);
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.format.json.JsonType#getName()
     */
    public String getName() {
        return name;
    }

    /** Sets the field name from the mapping file. */
    public void setName(String name) {
        this.name = name;
    }

    /** Sets whether the field is optional in the stream (minOccurs == 0). */
    public void setLazy(boolean lazy) {
        this.lazy = lazy;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#getSize()
     */
    public int getSize() {
        return 1;
    }

    /** Sets whether the field may be set to 'null' in the stream. */
    public void setNillable(boolean nillable) {
        this.nillable = nillable;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#isNillable()
     */
    public boolean isNillable() {
        return nillable;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.FieldFormat#isLazy()
     */
    public boolean isLazy() {
        return lazy;
    }

    /** Sets the JSON property name used in the stream. */
    public void setJsonName(String jsonName) {
        this.jsonName = jsonName;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.format.json.JsonType#getJsonName()
     */
    public String getJsonName() {
        return jsonName;
    }

    /** Sets the JSON type: boolean, number or string (see {@link JsonNode}). */
    public void setJsonType(char jsonType) {
        this.jsonType = jsonType;
    }

    /*
     * (non-Javadoc)
     * @see org.beanio.internal.parser.format.json.JsonType#getJsonType()
     */
    public char getJsonType() {
        return jsonType;
    }

    /** Sets whether the field is mapped to a JSON array. */
    public void setJsonArray(boolean repeating) {
        this.jsonArray = repeating;
    }

    /** Returns whether the field is mapped to a JSON array. */
    public boolean isJsonArray() {
        return jsonArray;
    }

    /** Returns the index of this field in a JSON array, or -1 if the field itself repeats. */
    public int getJsonArrayIndex() {
        return jsonArrayIndex;
    }

    /** Sets the index of this field in a JSON array (-1 if the field itself repeats). */
    public void setJsonArrayIndex(int jsonArrayIndex) {
        this.jsonArrayIndex = jsonArrayIndex;
    }

    /** Returns the optional field padding, or null. */
    public FieldPadding getPadding() {
        return padding;
    }

    /** Sets the optional field padding. */
    public void setPadding(FieldPadding padding) {
        this.padding = padding;
    }

    /** Returns whether type conversion is bypassed on marshalling. */
    public boolean isBypassTypeHandler() {
        return bypassTypeHandler;
    }

    /** Sets whether type conversion is bypassed on marshalling. */
    public void setBypassTypeHandler(boolean bypassTypeHandler) {
        this.bypassTypeHandler = bypassTypeHandler;
    }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "[name=" + getName() +
            ", jsonName=" + jsonName +
            ", jsonType=" + jsonType + (isJsonArray() ? "[]" : "") +
            ", jsonArrayIndex=" + jsonArrayIndex +
            ", bypass=" + bypassTypeHandler +
            "]";
    }
}
package com.mindbodyonline.ironhide.Infrastructure.IronhideViews; import android.support.test.espresso.Root; import android.support.test.espresso.action.ViewActions; import android.support.test.espresso.matcher.ViewMatchers; import android.view.View; import com.mindbodyonline.ironhide.Infrastructure.Extensions.TextViewMatchers; import com.mindbodyonline.ironhide.PageObjects.PageObject; import org.hamcrest.Matcher; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalToIgnoringCase; import static org.hamcrest.Matchers.equalToIgnoringWhiteSpace; import static org.hamcrest.Matchers.startsWith; /** * Simple element that allows text interaction. * Implements methods such as typeText, enterText, checkHintText * Only use this element when dealing with an editable text field (e.g. an {@link android.widget.EditText}) * * @param <T> The model the current element will return when interacted with */ public class TextField<T extends PageObject> extends BaseView<T> { /** @see BaseView#BaseView(Class, org.hamcrest.Matcher) */ public TextField(Class<T> type, Matcher<View> selector) { super(type, selector); } /** @see BaseView#BaseView(Class, int) */ public TextField(Class<T> type, int resourceId) { super(type, resourceId); } /** @see BaseView#BaseView(Class, int, int) */ public TextField(Class<T> type, int IGNORED, int stringResourceId) { super(type, IGNORED, stringResourceId); } /** @see BaseView#BaseView(Class, String) */ public TextField(Class<T> type, String displayText) { super(type, displayText); } /** {@inheritDoc} */ @Override public <E extends PageObject> TextField<E> goesTo(Class<E> type) { return new TextField<>(type, getSelector()); } /** * Type the given text into the element. * @param toType Text to type * @return The model reached by interacting with this element. 
*/ public T typeText(String toType) { return performAction(ViewActions.typeText(toType)); } /** * Type the given text into the element. * * @param toType Text to type * @return The model reached by interacting with this element. */ public T typeText(int stringIdToType) { return performAction(ViewActions.typeText(fromId(stringIdToType))); } /** * Clear the text from the element. * @return The model reached by interacting with this element. */ public T clearText() { return performAction(ViewActions.clearText()); } /** * Type the given text into the element. Element is assumed to have focus. * @param stringToBeTyped Text to type. * @return The model reached by interacting with this element. */ public T typeTextIntoFocusedView(String stringToBeTyped) { return performAction(ViewActions.typeTextIntoFocusedView(stringToBeTyped)); } /** * Replace the current text in the text field with the given new text. * @param newText Text to replace with * @return The model reached by interacting with this element. */ public T replaceText(String newText) { return performAction(ViewActions.replaceText(newText)); } /** * Check to see if the element contains the given hint text. * @param stringId Resource id of the string to check for. * @return The model reached by interacting with this element. */ public T withHintText(int stringId) { return checkMatches(ViewMatchers.withHint(stringId)); } /** * Check to see if the element contains the given hint text. * @param string The string to check for. * @return The model reached by interacting with this element. */ public T withHintText(String string) { return checkMatches(ViewMatchers.withHint(string)); } /** * Check to see if the element contains the given hint text. * @param stringMatcher String matcher to check against. * @return The model reached by interacting with this element. 
*/ public T withHintText(Matcher<String> stringMatcher) { return checkMatches(ViewMatchers.withHint(stringMatcher));} /** * More matchers */ /** * Checks to see if the element contains the string with the given resourceId. * * @param resourceId Resource ID of the string to check for. * @return The model reached by interacting with this element. */ public T hintContainsString(int resourceId) { return withHintText(containsString(fromId(resourceId))); } /** * Checks to see if a TextView's text ends with a certain string given the string's resource id. * * @param resourceId The string's resource id * @return The model reached by interacting with this element. */ public T hintEndsWith(int resourceId) { return withHintText(endsWith(fromId(resourceId))); } /** * Checks to see if a TextView's text is equal to (ignoring case) a certain string given the string's resource id. * * @param resourceId The string's resource id * @return The model reached by interacting with this element. */ public T hintEqualToIgnoringCase(int resourceId) { return withHintText(equalToIgnoringCase(fromId(resourceId))); } /** * Checks to see if a TextView's text is equal to (ignoring white space around words) a certain string given the string's resource id. * * @param resourceId The string's resource id * @return The model reached by interacting with this element. */ public T hintEqualToIgnoringWhiteSpace(int resourceId) { return withHintText(equalToIgnoringWhiteSpace(fromId(resourceId))); } /** * Checks to see if a TextView's text is empty or null. * NOTE: see issue 72 for Espresso (https://code.google.com/p/android-test-kit/issues/detail?id=72) * * @return The model reached by interacting with this element. */ public T isEmptyOrNullHint() { return checkMatches(TextViewMatchers.isEmptyOrNullHint()); } /** * Checks to see if a TextView's text is empty. 
* NOTE: see issue 72 for Espresso (https://code.google.com/p/android-test-kit/issues/detail?id=72) * * @return The model reached by interacting with this element. */ public T isEmptyHint() { return checkMatches(TextViewMatchers.isEmptyHint()); } /** * Checks to see if a TextView's text starts with a certain string given the string's resource id. * * @param resourceId The string's resource id * @return The model reached by interacting with this element. */ public T hintStartsWith(final int resourceId) { return withHintText(startsWith(fromId(resourceId))); } /** * Root Matchers return LayoutView */ /** {@inheritDoc} */ @Override public TextField<T> changeRoot() { return (TextField<T>) super.changeRoot(); } /** {@inheritDoc} */ @Override public TextField<T> inRoot(Matcher<Root> rootMatcher) { return (TextField<T>) super.inRoot(rootMatcher); } /** {@inheritDoc} */ @Override public TextField<T> inDialogRoot() { return (TextField<T>) super.inDialogRoot(); } /** {@inheritDoc} */ @Override public TextField<T> inPlatformPopup() { return (TextField<T>) super.inPlatformPopup(); } /** {@inheritDoc} */ @Override public TextField<T> inTouchableRoot() { return (TextField<T>) super.inTouchableRoot(); } /** {@inheritDoc} */ @Override public TextField<T> inDecorView(Matcher<View> decorViewMatcher) { return (TextField<T>) super.inDecorView(decorViewMatcher); } /** {@inheritDoc} */ @Override public TextField<T> inFocusableRoot() { return (TextField<T>) super.inFocusableRoot(); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/

package org.apache.kafka.connect.runtime;

import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.utils.SystemTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.connector.Connector;
import org.apache.kafka.connect.connector.ConnectorContext;
import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.sink.SinkConnector;
import org.apache.kafka.connect.sink.SinkTask;
import org.apache.kafka.connect.source.SourceTask;
import org.apache.kafka.connect.storage.Converter;
import org.apache.kafka.connect.storage.OffsetBackingStore;
import org.apache.kafka.connect.storage.OffsetStorageReader;
import org.apache.kafka.connect.storage.OffsetStorageWriter;
import org.apache.kafka.connect.storage.OffsetStorageReaderImpl;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.reflections.Reflections;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * <p>
 * Worker runs a (dynamic) set of tasks in a set of threads, doing the work of actually moving
 * data to/from Kafka.
 * </p>
 * <p>
 * Since each task has a dedicated thread, this is mainly just a container for them.
 * </p>
 */
public class Worker {
    private static final Logger log = LoggerFactory.getLogger(Worker.class);

    private Time time;
    private WorkerConfig config;
    private Converter keyConverter;
    private Converter valueConverter;
    private Converter internalKeyConverter;
    private Converter internalValueConverter;
    private OffsetBackingStore offsetBackingStore;
    private HashMap<String, Connector> connectors = new HashMap<>();
    private HashMap<ConnectorTaskId, WorkerTask> tasks = new HashMap<>();
    private KafkaProducer<byte[], byte[]> producer;
    private SourceTaskOffsetCommitter sourceTaskOffsetCommitter;

    public Worker(WorkerConfig config, OffsetBackingStore offsetBackingStore) {
        this(new SystemTime(), config, offsetBackingStore);
    }

    @SuppressWarnings("unchecked")
    public Worker(Time time, WorkerConfig config, OffsetBackingStore offsetBackingStore) {
        this.time = time;
        this.config = config;
        // Key/value converters for user data; "true"/"false" marks key vs. value configuration.
        this.keyConverter = config.getConfiguredInstance(WorkerConfig.KEY_CONVERTER_CLASS_CONFIG, Converter.class);
        this.keyConverter.configure(config.originalsWithPrefix("key.converter."), true);
        this.valueConverter = config.getConfiguredInstance(WorkerConfig.VALUE_CONVERTER_CLASS_CONFIG, Converter.class);
        this.valueConverter.configure(config.originalsWithPrefix("value.converter."), false);
        // Separate converters for internal data such as offsets.
        this.internalKeyConverter = config.getConfiguredInstance(WorkerConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, Converter.class);
        this.internalKeyConverter.configure(config.originalsWithPrefix("internal.key.converter."), true);
        this.internalValueConverter = config.getConfiguredInstance(WorkerConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, Converter.class);
        this.internalValueConverter.configure(config.originalsWithPrefix("internal.value.converter."), false);

        this.offsetBackingStore = offsetBackingStore;
        this.offsetBackingStore.configure(config.originals());
    }

    public void start() {
        log.info("Worker starting");

        Map<String, Object> producerProps = new HashMap<>();
        producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, Utils.join(config.getList(WorkerConfig.BOOTSTRAP_SERVERS_CONFIG), ","));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArraySerializer");
        // These settings are designed to ensure there is no data loss. They *may* be overridden via configs passed to the
        // worker, but this may compromise the delivery guarantees of Kafka Connect.
        producerProps.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, ((Integer) Integer.MAX_VALUE).toString());
        producerProps.put(ProducerConfig.RETRIES_CONFIG, ((Integer) Integer.MAX_VALUE).toString());
        producerProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, ((Long) Long.MAX_VALUE).toString());
        producerProps.put(ProducerConfig.ACKS_CONFIG, "all");
        producerProps.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, "1");
        producerProps.putAll(config.originalsWithPrefix("producer."));

        producer = new KafkaProducer<>(producerProps);

        offsetBackingStore.start();
        sourceTaskOffsetCommitter = new SourceTaskOffsetCommitter(time, config);

        log.info("Worker started");
    }

    public void stop() {
        log.info("Worker stopping");

        long started = time.milliseconds();
        long limit = started + config.getLong(WorkerConfig.TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG);

        for (Map.Entry<String, Connector> entry : connectors.entrySet()) {
            Connector conn = entry.getValue();
            // FIX: the original message was split as "...before the" + "Worker..." which
            // rendered "before theWorker"; a space has been added.
            log.warn("Shutting down connector {} uncleanly; herder should have shut down connectors before the " +
                    "Worker is stopped.", conn);
            try {
                conn.stop();
            } catch (ConnectException e) {
                log.error("Error while shutting down connector " + conn, e);
            }
        }

        // Signal all tasks to stop first, then wait for each within the remaining grace period.
        for (Map.Entry<ConnectorTaskId, WorkerTask> entry : tasks.entrySet()) {
            WorkerTask task = entry.getValue();
            log.warn("Shutting down task {} uncleanly; herder should have shut down " +
                    "tasks before the Worker is stopped.", task);
            try {
                task.stop();
            } catch (ConnectException e) {
                log.error("Error while shutting down task " + task, e);
            }
        }

        for (Map.Entry<ConnectorTaskId, WorkerTask> entry : tasks.entrySet()) {
            WorkerTask task = entry.getValue();
            log.debug("Waiting for task {} to finish shutting down", task);
            if (!task.awaitStop(Math.max(limit - time.milliseconds(), 0)))
                log.error("Graceful shutdown of task {} failed.", task);
            task.close();
        }

        long timeoutMs = limit - time.milliseconds();
        sourceTaskOffsetCommitter.close(timeoutMs);

        offsetBackingStore.stop();

        log.info("Worker stopped");
    }

    public WorkerConfig config() {
        return config;
    }

    /**
     * Add a new connector.
     * @param connConfig connector configuration
     * @param ctx context for the connector
     * @throws ConnectException if a connector with the same name already exists or it fails to start
     */
    public void addConnector(ConnectorConfig connConfig, ConnectorContext ctx) {
        String connName = connConfig.getString(ConnectorConfig.NAME_CONFIG);
        Class<? extends Connector> connClass = getConnectorClass(connConfig.getString(ConnectorConfig.CONNECTOR_CLASS_CONFIG));

        log.info("Creating connector {} of type {}", connName, connClass.getName());

        if (connectors.containsKey(connName))
            throw new ConnectException("Connector with name " + connName + " already exists");

        final Connector connector = instantiateConnector(connClass);
        log.info("Instantiated connector {} with version {} of type {}", connName, connector.version(), connClass.getName());

        connector.initialize(ctx);
        try {
            connector.start(connConfig.originalsStrings());
        } catch (ConnectException e) {
            throw new ConnectException("Connector threw an exception while starting", e);
        }

        connectors.put(connName, connector);

        log.info("Finished creating connector {}", connName);
    }

    /* Now that the configuration doesn't contain the actual class name, we need to be able to tell the herder whether a connector is a Sink */
    public boolean isSinkConnector(String connName) {
        return SinkConnector.class.isAssignableFrom(connectors.get(connName).getClass());
    }

    /**
     * Resolves a connector alias to its class, first trying it as a fully-qualified class name
     * and falling back to a classpath scan for a matching simple name.
     */
    @SuppressWarnings("unchecked")
    private Class<? extends Connector> getConnectorClass(String connectorAlias) {
        // Avoid the classpath scan if the full class name was provided
        try {
            Class<?> clazz = Class.forName(connectorAlias);
            if (!Connector.class.isAssignableFrom(clazz))
                throw new ConnectException("Class " + connectorAlias + " does not implement Connector");
            return (Class<? extends Connector>) clazz;
        } catch (ClassNotFoundException e) {
            // Fall through to scan for the alias
        }

        // Iterate over our entire classpath to find all the connectors and hopefully one of them matches the alias from the connector configuration
        Reflections reflections = new Reflections(new ConfigurationBuilder()
                .setUrls(ClasspathHelper.forJavaClassPath()));

        Set<Class<? extends Connector>> connectors = reflections.getSubTypesOf(Connector.class);

        List<Class<? extends Connector>> results = new ArrayList<>();

        for (Class<? extends Connector> connector : connectors) {
            // Configuration included the class name but not package
            if (connector.getSimpleName().equals(connectorAlias))
                results.add(connector);

            // Configuration included a short version of the name (i.e. FileStreamSink instead of FileStreamSinkConnector)
            if (connector.getSimpleName().equals(connectorAlias + "Connector"))
                results.add(connector);
        }

        if (results.isEmpty())
            throw new ConnectException("Failed to find any class that implements Connector and which name matches " + connectorAlias +
                    " available connectors are: " + connectorNames(connectors));
        if (results.size() > 1) {
            throw new ConnectException("More than one connector matches alias " + connectorAlias +
                    ". Please use full package + class name instead. Classes found: " + connectorNames(results));
        }

        // We just validated that we have exactly one result, so this is safe
        return results.get(0);
    }

    /** Renders the class names of the given connectors as a comma-separated list. */
    private String connectorNames(Collection<Class<? extends Connector>> connectors) {
        // FIX: the original unconditionally called substring(0, length - 2), which threw
        // StringIndexOutOfBoundsException when the collection was empty.
        if (connectors.isEmpty())
            return "";
        StringBuilder names = new StringBuilder();
        for (Class<?> c : connectors)
            names.append(c.getName()).append(", ");
        return names.substring(0, names.length() - 2);
    }

    private static Connector instantiateConnector(Class<? extends Connector> connClass) {
        try {
            return Utils.newInstance(connClass);
        } catch (Throwable t) {
            // Catches normal exceptions due to instantiation errors as well as any runtime errors that
            // may be caused by user code
            throw new ConnectException("Failed to create connector instance", t);
        }
    }

    /**
     * Builds the per-task configurations for a connector.
     * @param connName the connector name
     * @param maxTasks upper bound on the number of tasks
     * @param sinkTopics topics for sink tasks, or null for source connectors
     */
    public List<Map<String, String>> connectorTaskConfigs(String connName, int maxTasks, List<String> sinkTopics) {
        log.trace("Reconfiguring connector tasks for {}", connName);

        Connector connector = connectors.get(connName);
        if (connector == null)
            throw new ConnectException("Connector " + connName + " not found in this worker.");

        List<Map<String, String>> result = new ArrayList<>();
        String taskClassName = connector.taskClass().getName();
        for (Map<String, String> taskProps : connector.taskConfigs(maxTasks)) {
            Map<String, String> taskConfig = new HashMap<>(taskProps); // Ensure we don't modify the connector's copy of the config
            taskConfig.put(TaskConfig.TASK_CLASS_CONFIG, taskClassName);
            if (sinkTopics != null)
                taskConfig.put(SinkTask.TOPICS_CONFIG, Utils.join(sinkTopics, ","));
            result.add(taskConfig);
        }
        return result;
    }

    public void stopConnector(String connName) {
        log.info("Stopping connector {}", connName);

        Connector connector = connectors.get(connName);
        if (connector == null)
            throw new ConnectException("Connector " + connName + " not found in this worker.");

        try {
            connector.stop();
        } catch (ConnectException e) {
            log.error("Error shutting down connector {}: ", connector, e);
        }

        connectors.remove(connName);

        log.info("Stopped connector {}", connName);
    }

    /**
     * Get the IDs of the connectors currently running in this worker.
     */
    public Set<String> connectorNames() {
        return connectors.keySet();
    }

    /**
     * Add a new task.
     * @param id Globally unique ID for this task.
     * @param taskConfig the parsed task configuration
     */
    public void addTask(ConnectorTaskId id, TaskConfig taskConfig) {
        log.info("Creating task {}", id);

        if (tasks.containsKey(id)) {
            String msg = "Task already exists in this worker; the herder should not have requested " +
                    "that this : " + id;
            log.error(msg);
            throw new ConnectException(msg);
        }

        Class<? extends Task> taskClass = taskConfig.getClass(TaskConfig.TASK_CLASS_CONFIG).asSubclass(Task.class);
        final Task task = instantiateTask(taskClass);
        log.info("Instantiated task {} with version {} of type {}", id, task.version(), taskClass.getName());

        // Decide which type of worker task we need based on the type of task.
        final WorkerTask workerTask;
        if (task instanceof SourceTask) {
            SourceTask sourceTask = (SourceTask) task;
            OffsetStorageReader offsetReader = new OffsetStorageReaderImpl(offsetBackingStore, id.connector(),
                    internalKeyConverter, internalValueConverter);
            OffsetStorageWriter offsetWriter = new OffsetStorageWriter(offsetBackingStore, id.connector(),
                    internalKeyConverter, internalValueConverter);
            workerTask = new WorkerSourceTask(id, sourceTask, keyConverter, valueConverter, producer,
                    offsetReader, offsetWriter, config, time);
        } else if (task instanceof SinkTask) {
            workerTask = new WorkerSinkTask(id, (SinkTask) task, config, keyConverter, valueConverter, time);
        } else {
            log.error("Tasks must be a subclass of either SourceTask or SinkTask", task);
            throw new ConnectException("Tasks must be a subclass of either SourceTask or SinkTask");
        }

        // Start the task before adding modifying any state, any exceptions are caught higher up the
        // call chain and there's no cleanup to do here
        workerTask.start(taskConfig.originalsStrings());
        if (task instanceof SourceTask) {
            WorkerSourceTask workerSourceTask = (WorkerSourceTask) workerTask;
            sourceTaskOffsetCommitter.schedule(id, workerSourceTask);
        }
        tasks.put(id, workerTask);
    }

    private static Task instantiateTask(Class<? extends Task> taskClass) {
        try {
            return Utils.newInstance(taskClass);
        } catch (KafkaException e) {
            throw new ConnectException("Task class not found", e);
        }
    }

    public void stopTask(ConnectorTaskId id) {
        log.info("Stopping task {}", id);

        WorkerTask task = getTask(id);
        if (task instanceof WorkerSourceTask)
            sourceTaskOffsetCommitter.remove(id);
        task.stop();
        if (!task.awaitStop(config.getLong(WorkerConfig.TASK_SHUTDOWN_GRACEFUL_TIMEOUT_MS_CONFIG)))
            log.error("Graceful stop of task {} failed.", task);
        task.close();
        tasks.remove(id);
    }

    /**
     * Get the IDs of the tasks currently running in this worker.
     */
    public Set<ConnectorTaskId> taskIds() {
        return tasks.keySet();
    }

    private WorkerTask getTask(ConnectorTaskId id) {
        WorkerTask task = tasks.get(id);
        if (task == null) {
            log.error("Task not found: " + id);
            throw new ConnectException("Task not found: " + id);
        }
        return task;
    }

    public Converter getInternalKeyConverter() {
        return internalKeyConverter;
    }

    public Converter getInternalValueConverter() {
        return internalValueConverter;
    }
}
/*
 * Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.client.impl.proxy;

import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.codec.SetAddAllCodec;
import com.hazelcast.client.impl.protocol.codec.SetAddCodec;
import com.hazelcast.client.impl.protocol.codec.SetAddListenerCodec;
import com.hazelcast.client.impl.protocol.codec.SetClearCodec;
import com.hazelcast.client.impl.protocol.codec.SetCompareAndRemoveAllCodec;
import com.hazelcast.client.impl.protocol.codec.SetCompareAndRetainAllCodec;
import com.hazelcast.client.impl.protocol.codec.SetContainsAllCodec;
import com.hazelcast.client.impl.protocol.codec.SetContainsCodec;
import com.hazelcast.client.impl.protocol.codec.SetGetAllCodec;
import com.hazelcast.client.impl.protocol.codec.SetIsEmptyCodec;
import com.hazelcast.client.impl.protocol.codec.SetRemoveCodec;
import com.hazelcast.client.impl.protocol.codec.SetRemoveListenerCodec;
import com.hazelcast.client.impl.protocol.codec.SetSizeCodec;
import com.hazelcast.client.impl.spi.ClientContext;
import com.hazelcast.client.impl.spi.EventHandler;
import com.hazelcast.client.impl.spi.impl.ListenerMessageCodec;
import com.hazelcast.cluster.Member;
import com.hazelcast.collection.ISet;
import com.hazelcast.collection.ItemEvent;
import com.hazelcast.collection.ItemListener;
import com.hazelcast.collection.LocalSetStats;
import com.hazelcast.collection.impl.common.DataAwareItemEvent;
import com.hazelcast.core.ItemEventType;
import com.hazelcast.internal.serialization.Data;
import com.hazelcast.spi.impl.UnmodifiableLazyList;

import javax.annotation.Nonnull;
import java.util.Collection;
import java.util.Iterator;
import java.util.UUID;

import static com.hazelcast.internal.util.CollectionUtil.objectToDataCollection;
import static com.hazelcast.internal.util.Preconditions.checkNotNull;

/**
 * Proxy implementation of {@link ISet}.
 *
 * @param <E> the type of elements in this set
 */
public class ClientSetProxy<E> extends PartitionSpecificClientProxy implements ISet<E> {

    public ClientSetProxy(String serviceName, String name, ClientContext context) {
        super(serviceName, name, context);
    }

    // Each operation below follows the same pattern: encode a request message with
    // the set's name (plus serialized arguments), invoke it on this proxy's partition,
    // and decode the typed result from the response message.

    @Override
    public int size() {
        ClientMessage request = SetSizeCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return SetSizeCodec.decodeResponse(response);
    }

    @Override
    public boolean isEmpty() {
        ClientMessage request = SetIsEmptyCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return SetIsEmptyCodec.decodeResponse(response);
    }

    @Override
    public boolean contains(@Nonnull Object o) {
        checkNotNull(o, "Null item is not allowed");
        // Arguments are serialized to Data before being placed in the request.
        Data value = toData(o);
        ClientMessage request = SetContainsCodec.encodeRequest(name, value);
        ClientMessage response = invokeOnPartition(request);
        return SetContainsCodec.decodeResponse(response);
    }

    @Override
    public Iterator<E> iterator() {
        // Iterates over a point-in-time snapshot fetched from the member;
        // the iterator does not reflect later modifications of the remote set.
        return getAll().iterator();
    }

    @Override
    public Object[] toArray() {
        return getAll().toArray();
    }

    @Override
    public <T> T[] toArray(@Nonnull T[] a) {
        checkNotNull(a, "Null array parameter is not allowed!");
        return getAll().toArray(a);
    }

    @Override
    public boolean add(@Nonnull E e) {
        checkNotNull(e, "Null item is not allowed");
        Data element = toData(e);
        ClientMessage request = SetAddCodec.encodeRequest(name, element);
        ClientMessage response = invokeOnPartition(request);
        return SetAddCodec.decodeResponse(response);
    }

    @Override
    public boolean remove(@Nonnull Object o) {
        checkNotNull(o, "Null item is not allowed");
        Data value = toData(o);
        ClientMessage request = SetRemoveCodec.encodeRequest(name, value);
        ClientMessage response = invokeOnPartition(request);
        return SetRemoveCodec.decodeResponse(response);
    }

    @Override
    public boolean containsAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed");
        // Serializes the whole collection client-side so the member can compare Data values.
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = SetContainsAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return SetContainsAllCodec.decodeResponse(response);
    }

    @Override
    public boolean addAll(@Nonnull Collection<? extends E> c) {
        checkNotNull(c, "Null collection is not allowed");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = SetAddAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return SetAddAllCodec.decodeResponse(response);
    }

    @Override
    public boolean removeAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = SetCompareAndRemoveAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return SetCompareAndRemoveAllCodec.decodeResponse(response);
    }

    @Override
    public boolean retainAll(@Nonnull Collection<?> c) {
        checkNotNull(c, "Null collection is not allowed");
        Collection<Data> dataCollection = objectToDataCollection(c, getSerializationService());
        ClientMessage request = SetCompareAndRetainAllCodec.encodeRequest(name, dataCollection);
        ClientMessage response = invokeOnPartition(request);
        return SetCompareAndRetainAllCodec.decodeResponse(response);
    }

    @Override
    public void clear() {
        ClientMessage request = SetClearCodec.encodeRequest(name);
        invokeOnPartition(request);
    }

    @Nonnull
    @Override
    public UUID addItemListener(@Nonnull final ItemListener<E> listener, final boolean includeValue) {
        checkNotNull(listener, "Null listener is not allowed!");
        EventHandler<ClientMessage> eventHandler = new ItemEventHandler(listener);
        return registerListener(createItemListenerCodec(includeValue), eventHandler);
    }

    /**
     * Creates the codec pair used to register and deregister the remote item listener.
     * includeValue controls whether item Data is delivered with each event.
     */
    private ListenerMessageCodec createItemListenerCodec(final boolean includeValue) {
        return new ListenerMessageCodec() {
            @Override
            public ClientMessage encodeAddRequest(boolean localOnly) {
                return SetAddListenerCodec.encodeRequest(name, includeValue, localOnly);
            }

            @Override
            public UUID decodeAddResponse(ClientMessage clientMessage) {
                return SetAddListenerCodec.decodeResponse(clientMessage);
            }

            @Override
            public ClientMessage encodeRemoveRequest(UUID realRegistrationId) {
                return SetRemoveListenerCodec.encodeRequest(name, realRegistrationId);
            }

            @Override
            public boolean decodeRemoveResponse(ClientMessage clientMessage) {
                return SetRemoveListenerCodec.decodeResponse(clientMessage);
            }
        };
    }

    @Override
    public boolean removeItemListener(@Nonnull UUID registrationId) {
        return deregisterListener(registrationId);
    }

    /** Fetches the full remote contents as a lazily-deserialized, unmodifiable list. */
    private Collection<E> getAll() {
        ClientMessage request = SetGetAllCodec.encodeRequest(name);
        ClientMessage response = invokeOnPartition(request);
        return new UnmodifiableLazyList(SetGetAllCodec.decodeResponse(response), getSerializationService());
    }

    @Override
    public LocalSetStats getLocalSetStats() {
        // Local statistics are a member-side concept; a client has no "local" copy of the set.
        throw new UnsupportedOperationException("Locality is ambiguous for client!");
    }

    @Override
    public String toString() {
        return "ISet{" + "name='" + name + '\'' + '}';
    }

    /**
     * Adapts protocol-level item events to the user-supplied {@link ItemListener},
     * dispatching on the event type (added vs. removed).
     */
    private class ItemEventHandler extends SetAddListenerCodec.AbstractEventHandler
            implements EventHandler<ClientMessage> {

        private final ItemListener<E> listener;

        ItemEventHandler(ItemListener<E> listener) {
            this.listener = listener;
        }

        @Override
        public void handleItemEvent(Data dataItem, UUID uuid, int eventType) {
            // uuid identifies the member that generated the event.
            Member member = getContext().getClusterService().getMember(uuid);
            ItemEvent<E> itemEvent = new DataAwareItemEvent(name, ItemEventType.getByType(eventType), dataItem,
                    member, getSerializationService());
            if (eventType == ItemEventType.ADDED.getType()) {
                listener.itemAdded(itemEvent);
            } else {
                listener.itemRemoved(itemEvent);
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.datetime.markup.html.basic;

import java.text.SimpleDateFormat;
import java.util.Date;

import org.apache.wicket.IGenericComponent;
import org.apache.wicket.datetime.DateConverter;
import org.apache.wicket.datetime.PatternDateConverter;
import org.apache.wicket.datetime.StyleDateConverter;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.convert.IConverter;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;

/**
 * A label bound to a {@link java.util.Date} model object that uses Joda time to format its value.
 * <p>
 * A date pattern can be supplied through some of the factory methods; when none is given,
 * {@link DateTimeFormat#shortDate()} is used.
 * </p>
 * <p>
 * The converters created by the factory methods enable applyTimeZoneDifference, which corrects
 * for the difference between the client's time zone and the server's time zone.
 * </p>
 *
 * @see DateTime
 * @see DateTimeFormat
 * @see DateTimeZone
 *
 * @author eelcohillenius
 */
public class DateLabel extends Label implements IGenericComponent<Date, DateLabel> {
    private static final long serialVersionUID = 1L;

    /** Optional text appended after the formatted date, or null for none. */
    private String after;

    /** Optional text prepended before the formatted date, or null for none. */
    private String before;

    /** Converter used to render the Date model object; never null. */
    private final DateConverter converter;

    /**
     * Construct with a converter and no model.
     *
     * @param id
     *            The component id
     * @param converter
     *            The converter to use; must not be null
     */
    public DateLabel(String id, DateConverter converter) {
        this(id, null, converter);
    }

    /**
     * Construct with a model and a converter.
     *
     * @param id
     *            The component id
     * @param model
     *            The model
     * @param converter
     *            The converter to use; must not be null
     */
    public DateLabel(String id, IModel<Date> model, DateConverter converter) {
        super(id, model);
        if (converter == null) {
            throw new IllegalStateException("converter may not be null");
        }
        this.converter = converter;
    }

    /**
     * Creates a new DateLabel that formats using the given date pattern.
     *
     * @param id
     *            The id of the label
     * @param model
     *            The model
     * @param datePattern
     *            The pattern to use. Must be not null. See {@link SimpleDateFormat} for available
     *            patterns.
     * @return new instance
     */
    public static DateLabel forDatePattern(String id, IModel<Date> model, String datePattern) {
        return new DateLabel(id, model, new PatternDateConverter(datePattern, true));
    }

    /**
     * Creates a new DateLabel (without an explicit model) that formats using the given date
     * pattern.
     *
     * @param id
     *            The id of the label
     * @param datePattern
     *            The pattern to use. Must be not null. See {@link SimpleDateFormat} for available
     *            patterns.
     * @return new instance
     */
    public static DateLabel forDatePattern(String id, String datePattern) {
        return forDatePattern(id, null, datePattern);
    }

    /**
     * Creates a new DateLabel that formats using the given Joda date style.
     *
     * @param id
     *            The id of the label
     * @param model
     *            The model
     * @param dateStyle
     *            style to use in case no pattern is provided. Must be two characters from the set
     *            {"S", "M", "L", "F", "-"}. Must be not null. See
     *            {@link DateTimeFormat#forStyle(String)} for options.
     * @return new instance
     */
    public static DateLabel forDateStyle(String id, IModel<Date> model, String dateStyle) {
        return new DateLabel(id, model, new StyleDateConverter(dateStyle, true));
    }

    /**
     * Creates a new DateLabel (without an explicit model) that formats using the given Joda date
     * style.
     *
     * @param id
     *            The id of the label
     * @param dateStyle
     *            style to use in case no pattern is provided. Must be two characters from the set
     *            {"S", "M", "L", "F", "-"}. Must be not null. See
     *            {@link DateTimeFormat#forStyle(String)} for options.
     * @return new instance
     */
    public static DateLabel forDateStyle(String id, String dateStyle) {
        return forDateStyle(id, null, dateStyle);
    }

    /**
     * Creates a new DateLabel defaulting to the short date style.
     *
     * @param id
     *            The id of the label
     * @return new instance
     */
    public static DateLabel forShortStyle(String id) {
        return forShortStyle(id, null);
    }

    /**
     * Creates a new DateLabel defaulting to the short date style.
     *
     * @param id
     *            The id of the label
     * @param model
     *            The model
     * @return new instance
     */
    public static DateLabel forShortStyle(String id, IModel<Date> model) {
        return new DateLabel(id, model, new StyleDateConverter(true));
    }

    /**
     * Creates a new DateLabel using the provided converter.
     *
     * @param id
     *            The id of the label
     * @param converter
     *            the date converter
     * @return new instance
     */
    public static DateLabel withConverter(String id, DateConverter converter) {
        return withConverter(id, null, converter);
    }

    /**
     * Creates a new DateLabel using the provided converter.
     *
     * @param id
     *            The id of the label
     * @param model
     *            The model
     * @param converter
     *            the date converter
     * @return new instance
     */
    public static DateLabel withConverter(String id, IModel<Date> model, DateConverter converter) {
        return new DateLabel(id, model, converter);
    }

    /**
     * @return text appended after the label, or null
     */
    public String getAfter() {
        return after;
    }

    /**
     * @return text prepended before the label, or null
     */
    public String getBefore() {
        return before;
    }

    /**
     * @param after
     *            text to append after the label
     */
    public void setAfter(String after) {
        this.after = after;
    }

    /**
     * @param before
     *            text to prepend before the label
     */
    public void setBefore(String before) {
        this.before = before;
    }

    /**
     * Returns the specialized converter for {@link Date} (sub)types; delegates to the superclass
     * for everything else.
     */
    @Override
    public <C> IConverter<C> getConverter(Class<C> clazz) {
        if (!Date.class.isAssignableFrom(clazz)) {
            return super.getConverter(clazz);
        }
        @SuppressWarnings("unchecked")
        IConverter<C> dateConverter = (IConverter<C>) converter;
        return dateConverter;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void onComponentTagBody(MarkupStream markupStream, ComponentTag openTag) {
        String body = getDefaultModelObjectAsString();
        if (before != null) {
            body = before + body;
        }
        if (after != null) {
            body = body + after;
        }
        replaceComponentTagBody(markupStream, openTag, body);
    }
}
package org.deeplearning4j.nn.layers.pooling;

import org.apache.commons.lang3.ArrayUtils;
import org.deeplearning4j.nn.api.Layer;
import org.deeplearning4j.nn.api.MaskState;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.PoolingType;
import org.deeplearning4j.nn.gradient.DefaultGradient;
import org.deeplearning4j.nn.gradient.Gradient;
import org.deeplearning4j.nn.layers.AbstractLayer;
import org.deeplearning4j.util.MaskedReductionUtil;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastCopyOp;
import org.nd4j.linalg.api.ops.impl.broadcast.BroadcastMulOp;
import org.nd4j.linalg.api.ops.impl.transforms.IsMax;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.ops.transforms.Transforms;
import org.nd4j.linalg.primitives.Pair;

import java.util.Arrays;

/**
 * Global pooling layer - used to do pooling over time for RNNs, and 2d pooling for CNNs.<br>
 * Supports the following {@link PoolingType}s: SUM, AVG, MAX, PNORM<br>
 * Global pooling layer can also handle mask arrays when dealing with variable length inputs. Mask arrays are assumed
 * to be 2d, and are fed forward through the network during training or post-training forward pass:<br>
 * - Time series: mask arrays are shape [minibatchSize, maxTimeSeriesLength] and contain values 0 or 1 only<br>
 * - CNNs: mask have shape [minibatchSize, height] or [minibatchSize, width]. Important: the current implementation assumes
 * that for CNNs + variable length (masking), the input shape is [minibatchSize, depth, height, 1] or
 * [minibatchSize, depth, 1, width] respectively. This is the case with global pooling in architectures like CNN for
 * sentence classification.<br>
 * <p>
 * Behaviour with default settings:<br>
 * - 3d (time series) input with shape [minibatchSize, vectorSize, timeSeriesLength] -&gt; 2d output [minibatchSize, vectorSize]<br>
 * - 4d (CNN) input with shape [minibatchSize, depth, height, width] -&gt; 2d output [minibatchSize, depth]<br>
 * <p>
 * Alternatively, by setting collapseDimensions = false in the configuration, it is possible to retain the reduced dimensions
 * as 1s: this gives [minibatchSize, vectorSize, 1] for RNN output, and [minibatchSize, depth, 1, 1] for CNN output.<br>
 *
 * @author Alex Black
 */
public class GlobalPoolingLayer extends AbstractLayer<org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer> {

    // Default reduction dimensions when none are configured:
    // rank-3 (time series) input [mb, vectorSize, tsLength] -> pool over the time dimension (2);
    // rank-4 (CNN) input [mb, depth, h, w] -> pool over the spatial dimensions (2, 3).
    private static final int[] DEFAULT_TIMESERIES_POOL_DIMS = new int[] {2};
    private static final int[] DEFAULT_CNN_POOL_DIMS = new int[] {2, 3};

    // Configured pooling dimensions; null means "use the rank-appropriate default above".
    private final int[] poolingDimensions;
    // If true (the common case), the pooled dimensions are removed from the output shape;
    // if false, they are retained as size-1 dimensions.
    private final boolean collapseDimensions;
    private final PoolingType poolingType;
    // Cached p value for PNORM pooling (read from config at construction).
    private final int pNorm;

    /**
     * Creates the layer, caching pooling dimensions/type, collapse behaviour and p-norm value
     * from the layer configuration.
     *
     * @param conf network configuration whose layer must be a
     *             {@link org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer}
     */
    public GlobalPoolingLayer(NeuralNetConfiguration conf) {
        super(conf);
        org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer layerConf =
                        (org.deeplearning4j.nn.conf.layers.GlobalPoolingLayer) conf.getLayer();

        poolingDimensions = layerConf.getPoolingDimensions();
        collapseDimensions = layerConf.isCollapseDimensions();
        poolingType = layerConf.getPoolingType();
        pNorm = layerConf.getPnorm();
    }

    /** Pre-output equals activation here: the layer has no parameters or nonlinearity of its own. */
    @Override
    public INDArray preOutput(boolean training) {
        return activate(training);
    }

    @Override
    public boolean isPretrainLayer() {
        return false;
    }

    @Override
    public void clearNoiseWeightParams() {
        //No op: layer has no weight parameters
    }

    /** No parameters, hence no L2 contribution. */
    @Override
    public double calcL2(boolean backpropParamsOnly) {
        return 0;
    }

    /** No parameters, hence no L1 contribution. */
    @Override
    public double calcL1(boolean backpropParamsOnly) {
        return 0;
    }

    @Override
    public Type type() {
        return Type.SUBSAMPLING;
    }

    /**
     * Forward pass: reduces the input over the pooling dimensions.
     * <p>
     * Accepts rank-3 (time series) or rank-4 (CNN) input; any other rank throws
     * {@link UnsupportedOperationException}. When a mask array is set, the reduction is delegated
     * to {@link MaskedReductionUtil} so that masked-out elements do not contribute. With masked
     * CNN data, only 2d masks over inputs with h==1 or w==1, pooled over dimensions [2,3], are
     * supported. The 2d pooled result is reshaped back to the input rank (with 1s in the pooled
     * positions) when collapseDimensions is false.
     *
     * @param training ignored; pooling behaves identically in training and inference
     * @return pooled activations, rank 2 if collapseDimensions is true, otherwise input rank
     */
    @Override
    public INDArray activate(boolean training) {
        if (input == null) {
            throw new IllegalStateException("Cannot perform forward pass: input not set for layer " + layerId());
        }

        int[] poolDim = null;
        if (input.rank() == 3) {
            //TODO validation on pooling dimensions

            if (poolingDimensions == null) {
                //Use default pooling dimensions;
                poolDim = DEFAULT_TIMESERIES_POOL_DIMS;
            } else {
                poolDim = poolingDimensions;
            }

        } else if (input.rank() == 4) {
            //CNN activations
            if (poolingDimensions == null) {
                //Use default pooling dimensions;
                poolDim = DEFAULT_CNN_POOL_DIMS;
            } else {
                poolDim = poolingDimensions;
            }
        } else {
            throw new UnsupportedOperationException("Received rank " + input.rank() + " input (shape = "
                            + Arrays.toString(input.shape()) + "). Only rank 3 (time series) and rank 4 (images"
                            + "/CNN data) are currently supported for global pooling " + layerId());
        }

        INDArray reduced2d;
        if (maskArray == null) {
            //Standard 'full array' global pooling op
            reduced2d = activateHelperFullArray(input, poolDim);
        } else {
            if (input.rank() == 3) {
                //Masked time series
                reduced2d = MaskedReductionUtil.maskedPoolingTimeSeries(poolingType, input, maskArray, pNorm);
            } else if (input.rank() == 4) {
                //Masked convolutions. 4d convolution data, shape [minibatch, depth, h, w]
                //and 2d mask array.
                //Because of this: for now we'll support *masked* CNN global pooling on either
                // [minibatch, depth, 1, X] or [minibatch, depth, X, 1] data
                // with a mask array of shape [minibatch, X]
                if (maskArray.rank() != 2) {
                    throw new UnsupportedOperationException(
                                    "Only 2d mask arrays are currently supported for masked global reductions "
                                                    + "on CNN data. Got 4d activations array (shape "
                                                    + Arrays.toString(input.shape()) + ") and " + maskArray.rank()
                                                    + "d mask array (shape " + Arrays.toString(maskArray.shape())
                                                    + ") " + layerId());
                }

                int h = input.size(2);
                int w = input.size(3);
                int maskLength = maskArray.size(1);
                //Require a degenerate spatial dimension (h==1 or w==1) whose counterpart matches the mask length
                //NOTE(review): the messages below contain pre-existing typos ("with on", "poolling");
                //left unchanged here since error text is runtime behavior.
                if ((h != 1 && w != 1) || (h != maskLength && w != maskLength)) {
                    throw new UnsupportedOperationException(
                                    "Masked global pooling with on CNN data currently only supports data with h=1 or w=1:\n"
                                                    + " input activations must have shape [minibatchSize,depth,height=1,width] or [minibatchSize,depth,height,width=1] with "
                                                    + " mask array of shape [minibatchSize,width] or [minibatchSize,height] respectively.\n"
                                                    + " Got 4d activations array (shape " + Arrays.toString(input.shape())
                                                    + ") and " + maskArray.rank() + "d mask array (shape "
                                                    + Arrays.toString(maskArray.shape()) + ") " + layerId());
                }

                //Valid combinations of global pooling + masking for CNNs:
                //dimensions [2,3] with or without reduction
                //(reference comparison first is a cheap fast-path before the element-wise check)
                if (DEFAULT_CNN_POOL_DIMS != poolDim && !Arrays.equals(DEFAULT_CNN_POOL_DIMS, poolDim)) {
                    throw new UnsupportedOperationException(
                                    "Masked global pooling with on CNN data currently only supports poolling over dimensions "
                                                    + "[2,3] (i.e., width and height - both required). Got pooling dimensions "
                                                    + Arrays.toString(poolDim) + ") " + layerId());
                }

                boolean maskAlongHeight = (h == maskLength);
                //At this point: can't confuse w and h, as one has to be 1...

                reduced2d = MaskedReductionUtil.maskedPoolingConvolution(poolingType, input, maskArray,
                                maskAlongHeight, pNorm);
            } else {
                throw new UnsupportedOperationException("Invalid input: is rank " + input.rank() + " " + layerId());
            }
        }

        if (collapseDimensions) {
            //Standard/common case
            return reduced2d;
        } else {
            //Restore the pooled dimensions as size-1 axes
            int[] inputShape = input.shape();
            if (input.rank() == 3) {
                return reduced2d.reshape(reduced2d.ordering(), inputShape[0], inputShape[1], 1);
            } else {
                return reduced2d.reshape(reduced2d.ordering(), inputShape[0], inputShape[1], 1, 1);
            }
        }
    }

    /** Clone shares the same configuration object; the layer holds no other state worth copying. */
    @Override
    public Layer clone() {
        return new GlobalPoolingLayer(conf);
    }

    /**
     * Unmasked reduction of the full input over the given dimensions, per the configured
     * {@link PoolingType}.
     */
    private INDArray activateHelperFullArray(INDArray inputArray, int[] poolDim) {
        switch (poolingType) {
            case MAX:
                return inputArray.max(poolDim);
            case AVG:
                return inputArray.mean(poolDim);
            case SUM:
                return inputArray.sum(poolDim);
            case PNORM:
                //P norm: https://arxiv.org/pdf/1311.1780.pdf
                //out = (1/N * sum( |in| ^ p) ) ^ (1/p)
                //NOTE(review): reads p from layerConf() here rather than the cached pNorm field — presumably
                //equivalent; verify against config mutation semantics.
                int pnorm = layerConf().getPnorm();
                INDArray abs = Transforms.abs(inputArray, true); //copy, so input is not modified
                Transforms.pow(abs, pnorm, false); //in-place |in|^p
                INDArray pNorm = abs.sum(poolDim);
                return Transforms.pow(pNorm, 1.0 / pnorm, false);
            default:
                throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId());
        }
    }

    /**
     * Backprop: routes the incoming gradient back through the pooling reduction.
     * <p>
     * If collapseDimensions is false the epsilon arrives with trailing size-1 dimensions and is
     * first reshaped to 2d. The returned {@link Gradient} is empty because the layer has no
     * parameters; the second pair element is the epsilon w.r.t. the layer input, with the same
     * shape as the input.
     *
     * @param epsilon gradient w.r.t. this layer's output
     * @return (empty gradient, gradient w.r.t. input)
     */
    @Override
    public Pair<Gradient, INDArray> backpropGradient(INDArray epsilon) {
        if (!collapseDimensions && epsilon.rank() != 2) {
            int[] origShape = epsilon.shape();
            //Don't collapse dims case: error should be [minibatch, vectorSize, 1] or [minibatch, depth, 1, 1]
            //Reshape it to 2d, to get rid of the 1s
            epsilon = epsilon.reshape(epsilon.ordering(), origShape[0], origShape[1]);
        }

        Gradient retGradient = new DefaultGradient(); //Empty: no params

        //Resolve pooling dimensions exactly as in activate() — defaults depend on input rank
        int[] poolDim = null;
        if (input.rank() == 3) {
            if (poolingDimensions == null) {
                //Use default pooling dimensions;
                poolDim = DEFAULT_TIMESERIES_POOL_DIMS;
            } else {
                poolDim = poolingDimensions;
            }

        } else if (input.rank() == 4) {
            //CNN activations
            if (poolingDimensions == null) {
                //Use default pooling dimensions;
                poolDim = DEFAULT_CNN_POOL_DIMS;
            } else {
                poolDim = poolingDimensions;
            }
        }

        INDArray epsilonNd;
        if (maskArray == null) {
            //Standard 'full array' global pooling op
            epsilonNd = epsilonHelperFullArray(input, epsilon, poolDim);
        } else {
            if (input.rank() == 3) {
                epsilonNd = MaskedReductionUtil.maskedPoolingEpsilonTimeSeries(poolingType, input, maskArray,
                                epsilon, pNorm);
            } else if (input.rank() == 4) {
                int h = input.size(2);
                boolean maskAlongHeight = (h == maskArray.size(1));

                epsilonNd = MaskedReductionUtil.maskedPoolingEpsilonCnn(poolingType, input, maskArray, epsilon,
                                maskAlongHeight, pNorm);
            } else {
                throw new UnsupportedOperationException(layerId());
            }

        }

        return new Pair<>(retGradient, epsilonNd);
    }

    /**
     * Unmasked epsilon computation: broadcasts/distributes the 2d epsilon back over the pooled
     * dimensions according to the pooling type's derivative.
     * MAX routes the gradient to the argmax elements only; AVG spreads it uniformly (1/N);
     * SUM copies it; PNORM applies the p-norm chain rule.
     */
    private INDArray epsilonHelperFullArray(INDArray inputArray, INDArray epsilon, int[] poolDim) {

        //Broadcast: occurs on the remaining dimensions, after the pool dimensions have been removed.
        //TODO find a more efficient way to do this
        int[] broadcastDims = new int[inputArray.rank() - poolDim.length];
        int count = 0;
        for (int i = 0; i < inputArray.rank(); i++) {
            if (ArrayUtils.contains(poolDim, i))
                continue;
            broadcastDims[count++] = i;
        }

        switch (poolingType) {
            case MAX:
                //Gradient flows only to the maximal element(s) along the pooled dimensions
                INDArray isMax = Nd4j.getExecutioner().execAndReturn(new IsMax(inputArray.dup(), poolDim));
                return Nd4j.getExecutioner().execAndReturn(
                                new BroadcastMulOp(isMax, epsilon, isMax, broadcastDims));
            case AVG:
                //if out = avg(in,dims) then dL/dIn = 1/N * dL/dOut
                int n = 1;
                for (int d : poolDim) {
                    n *= inputArray.size(d);
                }
                INDArray ret = Nd4j.create(inputArray.shape());
                Nd4j.getExecutioner().exec(new BroadcastCopyOp(ret, epsilon, ret, broadcastDims));
                ret.divi(n);

                return ret;
            case SUM:
                //if out = sum(in,dims) then dL/dIn = dL/dOut, broadcast over the pooled dims
                INDArray retSum = Nd4j.create(inputArray.shape());
                Nd4j.getExecutioner().exec(new BroadcastCopyOp(retSum, epsilon, retSum, broadcastDims));
                return retSum;
            case PNORM:
                int pnorm = layerConf().getPnorm();

                //First: do forward pass to get pNorm array
                INDArray abs = Transforms.abs(inputArray, true);
                Transforms.pow(abs, pnorm, false); //in-place |in|^p
                //NOTE: local INDArray pNorm intentionally shadows the int pNorm field here
                INDArray pNorm = Transforms.pow(abs.sum(poolDim), 1.0 / pnorm);

                //dL/dIn = dL/dOut * dOut/dIn
                //dOut/dIn = in .* |in|^(p-2) / ||in||_p^(p-1), where ||in||_p is the output p-norm
                INDArray numerator;
                if (pnorm == 2) {
                    numerator = inputArray.dup();
                } else {
                    INDArray absp2 = Transforms.pow(Transforms.abs(inputArray, true), pnorm - 2, false);
                    numerator = inputArray.mul(absp2);
                }

                INDArray denom = Transforms.pow(pNorm, pnorm - 1, false);
                denom.rdivi(epsilon); //in-place reverse divide: denom := epsilon / denom
                Nd4j.getExecutioner().execAndReturn(
                                new BroadcastMulOp(numerator, denom, numerator, broadcastDims));

                return numerator;
            default:
                throw new RuntimeException("Unknown or not supported pooling type: " + poolingType + " " + layerId());
        }
    }

    /**
     * Stores the mask for use in activate()/backpropGradient() and returns null: after global
     * pooling, the variable-length dimension is gone, so no mask propagates to later layers.
     */
    @Override
    public Pair<INDArray, MaskState> feedForwardMaskArray(INDArray maskArray, MaskState currentMaskState,
                    int minibatchSize) {
        //Global pooling layer: no masking is possible after this point... i.e., masks have been taken into account
        // as part of the pooling
        this.maskArray = maskArray;
        this.maskState = null; //Not used in global pooling - always applied

        return null;
    }
}
package org.drip.analytics.holset; /* * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* * GENERATED on Fri Jan 11 19:54:07 EST 2013 ---- DO NOT DELETE */ /*! * Copyright (C) 2013 Lakshmi Krishnamurthy * Copyright (C) 2012 Lakshmi Krishnamurthy * Copyright (C) 2011 Lakshmi Krishnamurthy * * This file is part of CreditAnalytics, a free-software/open-source library for * fixed income analysts and developers - http://www.credit-trader.org * * CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus * towards the needs of the bonds and credit products community. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. 
*/ public class DEMHoliday implements org.drip.analytics.holset.LocationHoliday { public DEMHoliday() { } public java.lang.String getHolidayLoc() { return "DEM"; } public org.drip.analytics.holiday.Locale getHolidaySet() { org.drip.analytics.holiday.Locale lh = new org.drip.analytics.holiday.Locale(); lh.addStaticHoliday ("01-JAN-1998", "New Years Day"); lh.addStaticHoliday ("23-FEB-1998", "Shrove Monday"); lh.addStaticHoliday ("10-APR-1998", "Good Friday"); lh.addStaticHoliday ("13-APR-1998", "Easter Monday"); lh.addStaticHoliday ("01-MAY-1998", "Labour Day"); lh.addStaticHoliday ("21-MAY-1998", "Ascension Day"); lh.addStaticHoliday ("01-JUN-1998", "Whit Monday"); lh.addStaticHoliday ("11-JUN-1998", "Corpus Christi"); lh.addStaticHoliday ("24-DEC-1998", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-1998", "Christmas Day"); lh.addStaticHoliday ("31-DEC-1998", "New Years Eve"); lh.addStaticHoliday ("01-JAN-1999", "New Years Day"); lh.addStaticHoliday ("15-FEB-1999", "Shrove Monday"); lh.addStaticHoliday ("02-APR-1999", "Good Friday"); lh.addStaticHoliday ("05-APR-1999", "Easter Monday"); lh.addStaticHoliday ("13-MAY-1999", "Ascension Day"); lh.addStaticHoliday ("24-MAY-1999", "Whit Monday"); lh.addStaticHoliday ("03-JUN-1999", "Corpus Christi"); lh.addStaticHoliday ("01-NOV-1999", "All Saints Day"); lh.addStaticHoliday ("24-DEC-1999", "Christmas Eve"); lh.addStaticHoliday ("31-DEC-1999", "New Years Eve"); lh.addStaticHoliday ("06-MAR-2000", "Shrove Monday"); lh.addStaticHoliday ("21-APR-2000", "Good Friday"); lh.addStaticHoliday ("24-APR-2000", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2000", "Labour Day"); lh.addStaticHoliday ("01-JUN-2000", "Ascension Day"); lh.addStaticHoliday ("12-JUN-2000", "Whit Monday"); lh.addStaticHoliday ("22-JUN-2000", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2000", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2000", "All Saints Day"); lh.addStaticHoliday ("25-DEC-2000", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2000", 
"St. Stephens Day"); lh.addStaticHoliday ("01-JAN-2001", "New Years Day"); lh.addStaticHoliday ("26-FEB-2001", "Shrove Monday"); lh.addStaticHoliday ("13-APR-2001", "Good Friday"); lh.addStaticHoliday ("16-APR-2001", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2001", "Labour Day"); lh.addStaticHoliday ("24-MAY-2001", "Ascension Day"); lh.addStaticHoliday ("04-JUN-2001", "Whit Monday"); lh.addStaticHoliday ("14-JUN-2001", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2001", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2001", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2001", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2001", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2001", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2001", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2002", "New Years Day"); lh.addStaticHoliday ("11-FEB-2002", "Shrove Monday"); lh.addStaticHoliday ("29-MAR-2002", "Good Friday"); lh.addStaticHoliday ("01-APR-2002", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2002", "Labour Day"); lh.addStaticHoliday ("09-MAY-2002", "Ascension Day"); lh.addStaticHoliday ("20-MAY-2002", "Whit Monday"); lh.addStaticHoliday ("30-MAY-2002", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2002", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2002", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2002", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2002", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2002", "St. 
Stephens Day"); lh.addStaticHoliday ("31-DEC-2002", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2003", "New Years Day"); lh.addStaticHoliday ("03-MAR-2003", "Shrove Monday"); lh.addStaticHoliday ("18-APR-2003", "Good Friday"); lh.addStaticHoliday ("21-APR-2003", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2003", "Labour Day"); lh.addStaticHoliday ("29-MAY-2003", "Ascension Day"); lh.addStaticHoliday ("09-JUN-2003", "Whit Monday"); lh.addStaticHoliday ("19-JUN-2003", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2003", "Day of Unity"); lh.addStaticHoliday ("24-DEC-2003", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2003", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2003", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2003", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2004", "New Years Day"); lh.addStaticHoliday ("23-FEB-2004", "Shrove Monday"); lh.addStaticHoliday ("09-APR-2004", "Good Friday"); lh.addStaticHoliday ("12-APR-2004", "Easter Monday"); lh.addStaticHoliday ("20-MAY-2004", "Ascension Day"); lh.addStaticHoliday ("31-MAY-2004", "Whit Monday"); lh.addStaticHoliday ("10-JUN-2004", "Corpus Christi"); lh.addStaticHoliday ("01-NOV-2004", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2004", "Christmas Eve"); lh.addStaticHoliday ("31-DEC-2004", "New Years Eve"); lh.addStaticHoliday ("07-FEB-2005", "Shrove Monday"); lh.addStaticHoliday ("25-MAR-2005", "Good Friday"); lh.addStaticHoliday ("28-MAR-2005", "Easter Monday"); lh.addStaticHoliday ("05-MAY-2005", "Ascension Day"); lh.addStaticHoliday ("16-MAY-2005", "Whit Monday"); lh.addStaticHoliday ("26-MAY-2005", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2005", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2005", "All Saints Day"); lh.addStaticHoliday ("26-DEC-2005", "St. 
Stephens Day"); lh.addStaticHoliday ("27-FEB-2006", "Shrove Monday"); lh.addStaticHoliday ("14-APR-2006", "Good Friday"); lh.addStaticHoliday ("17-APR-2006", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2006", "Labour Day"); lh.addStaticHoliday ("25-MAY-2006", "Ascension Day"); lh.addStaticHoliday ("05-JUN-2006", "Whit Monday"); lh.addStaticHoliday ("15-JUN-2006", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2006", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2006", "All Saints Day"); lh.addStaticHoliday ("25-DEC-2006", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2006", "St. Stephens Day"); lh.addStaticHoliday ("01-JAN-2007", "New Years Day"); lh.addStaticHoliday ("19-FEB-2007", "Shrove Monday"); lh.addStaticHoliday ("06-APR-2007", "Good Friday"); lh.addStaticHoliday ("09-APR-2007", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2007", "Labour Day"); lh.addStaticHoliday ("17-MAY-2007", "Ascension Day"); lh.addStaticHoliday ("28-MAY-2007", "Whit Monday"); lh.addStaticHoliday ("07-JUN-2007", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2007", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2007", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2007", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2007", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2007", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2007", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2008", "New Years Day"); lh.addStaticHoliday ("04-FEB-2008", "Shrove Monday"); lh.addStaticHoliday ("21-MAR-2008", "Good Friday"); lh.addStaticHoliday ("24-MAR-2008", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2008", "Labour Day"); lh.addStaticHoliday ("12-MAY-2008", "Whit Monday"); lh.addStaticHoliday ("22-MAY-2008", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2008", "Day of Unity"); lh.addStaticHoliday ("24-DEC-2008", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2008", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2008", "St. 
Stephens Day"); lh.addStaticHoliday ("31-DEC-2008", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2009", "New Years Day"); lh.addStaticHoliday ("23-FEB-2009", "Shrove Monday"); lh.addStaticHoliday ("10-APR-2009", "Good Friday"); lh.addStaticHoliday ("13-APR-2009", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2009", "Labour Day"); lh.addStaticHoliday ("21-MAY-2009", "Ascension Day"); lh.addStaticHoliday ("01-JUN-2009", "Whit Monday"); lh.addStaticHoliday ("11-JUN-2009", "Corpus Christi"); lh.addStaticHoliday ("24-DEC-2009", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2009", "Christmas Day"); lh.addStaticHoliday ("31-DEC-2009", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2010", "New Years Day"); lh.addStaticHoliday ("15-FEB-2010", "Shrove Monday"); lh.addStaticHoliday ("02-APR-2010", "Good Friday"); lh.addStaticHoliday ("05-APR-2010", "Easter Monday"); lh.addStaticHoliday ("13-MAY-2010", "Ascension Day"); lh.addStaticHoliday ("24-MAY-2010", "Whit Monday"); lh.addStaticHoliday ("03-JUN-2010", "Corpus Christi"); lh.addStaticHoliday ("01-NOV-2010", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2010", "Christmas Eve"); lh.addStaticHoliday ("31-DEC-2010", "New Years Eve"); lh.addStaticHoliday ("07-MAR-2011", "Shrove Monday"); lh.addStaticHoliday ("22-APR-2011", "Good Friday"); lh.addStaticHoliday ("25-APR-2011", "Easter Monday"); lh.addStaticHoliday ("02-JUN-2011", "Ascension Day"); lh.addStaticHoliday ("13-JUN-2011", "Whit Monday"); lh.addStaticHoliday ("23-JUN-2011", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2011", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2011", "All Saints Day"); lh.addStaticHoliday ("26-DEC-2011", "St. 
Stephens Day"); lh.addStaticHoliday ("20-FEB-2012", "Shrove Monday"); lh.addStaticHoliday ("06-APR-2012", "Good Friday"); lh.addStaticHoliday ("09-APR-2012", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2012", "Labour Day"); lh.addStaticHoliday ("17-MAY-2012", "Ascension Day"); lh.addStaticHoliday ("28-MAY-2012", "Whit Monday"); lh.addStaticHoliday ("07-JUN-2012", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2012", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2012", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2012", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2012", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2012", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2012", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2013", "New Years Day"); lh.addStaticHoliday ("11-FEB-2013", "Shrove Monday"); lh.addStaticHoliday ("29-MAR-2013", "Good Friday"); lh.addStaticHoliday ("01-APR-2013", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2013", "Labour Day"); lh.addStaticHoliday ("09-MAY-2013", "Ascension Day"); lh.addStaticHoliday ("20-MAY-2013", "Whit Monday"); lh.addStaticHoliday ("30-MAY-2013", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2013", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2013", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2013", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2013", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2013", "St. 
Stephens Day"); lh.addStaticHoliday ("31-DEC-2013", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2014", "New Years Day"); lh.addStaticHoliday ("03-MAR-2014", "Shrove Monday"); lh.addStaticHoliday ("18-APR-2014", "Good Friday"); lh.addStaticHoliday ("21-APR-2014", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2014", "Labour Day"); lh.addStaticHoliday ("29-MAY-2014", "Ascension Day"); lh.addStaticHoliday ("09-JUN-2014", "Whit Monday"); lh.addStaticHoliday ("19-JUN-2014", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2014", "Day of Unity"); lh.addStaticHoliday ("24-DEC-2014", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2014", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2014", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2014", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2015", "New Years Day"); lh.addStaticHoliday ("16-FEB-2015", "Shrove Monday"); lh.addStaticHoliday ("03-APR-2015", "Good Friday"); lh.addStaticHoliday ("06-APR-2015", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2015", "Labour Day"); lh.addStaticHoliday ("14-MAY-2015", "Ascension Day"); lh.addStaticHoliday ("25-MAY-2015", "Whit Monday"); lh.addStaticHoliday ("04-JUN-2015", "Corpus Christi"); lh.addStaticHoliday ("24-DEC-2015", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2015", "Christmas Day"); lh.addStaticHoliday ("31-DEC-2015", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2016", "New Years Day"); lh.addStaticHoliday ("08-FEB-2016", "Shrove Monday"); lh.addStaticHoliday ("25-MAR-2016", "Good Friday"); lh.addStaticHoliday ("28-MAR-2016", "Easter Monday"); lh.addStaticHoliday ("05-MAY-2016", "Ascension Day"); lh.addStaticHoliday ("16-MAY-2016", "Whit Monday"); lh.addStaticHoliday ("26-MAY-2016", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2016", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2016", "All Saints Day"); lh.addStaticHoliday ("26-DEC-2016", "St. 
Stephens Day"); lh.addStaticHoliday ("27-FEB-2017", "Shrove Monday"); lh.addStaticHoliday ("14-APR-2017", "Good Friday"); lh.addStaticHoliday ("17-APR-2017", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2017", "Labour Day"); lh.addStaticHoliday ("25-MAY-2017", "Ascension Day"); lh.addStaticHoliday ("05-JUN-2017", "Whit Monday"); lh.addStaticHoliday ("15-JUN-2017", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2017", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2017", "All Saints Day"); lh.addStaticHoliday ("25-DEC-2017", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2017", "St. Stephens Day"); lh.addStaticHoliday ("01-JAN-2018", "New Years Day"); lh.addStaticHoliday ("12-FEB-2018", "Shrove Monday"); lh.addStaticHoliday ("30-MAR-2018", "Good Friday"); lh.addStaticHoliday ("02-APR-2018", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2018", "Labour Day"); lh.addStaticHoliday ("10-MAY-2018", "Ascension Day"); lh.addStaticHoliday ("21-MAY-2018", "Whit Monday"); lh.addStaticHoliday ("31-MAY-2018", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2018", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2018", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2018", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2018", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2018", "St. 
Stephens Day"); lh.addStaticHoliday ("31-DEC-2018", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2019", "New Years Day"); lh.addStaticHoliday ("04-MAR-2019", "Shrove Monday"); lh.addStaticHoliday ("19-APR-2019", "Good Friday"); lh.addStaticHoliday ("22-APR-2019", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2019", "Labour Day"); lh.addStaticHoliday ("30-MAY-2019", "Ascension Day"); lh.addStaticHoliday ("10-JUN-2019", "Whit Monday"); lh.addStaticHoliday ("20-JUN-2019", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2019", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2019", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2019", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2019", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2019", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2019", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2020", "New Years Day"); lh.addStaticHoliday ("24-FEB-2020", "Shrove Monday"); lh.addStaticHoliday ("10-APR-2020", "Good Friday"); lh.addStaticHoliday ("13-APR-2020", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2020", "Labour Day"); lh.addStaticHoliday ("21-MAY-2020", "Ascension Day"); lh.addStaticHoliday ("01-JUN-2020", "Whit Monday"); lh.addStaticHoliday ("11-JUN-2020", "Corpus Christi"); lh.addStaticHoliday ("24-DEC-2020", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2020", "Christmas Day"); lh.addStaticHoliday ("31-DEC-2020", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2021", "New Years Day"); lh.addStaticHoliday ("15-FEB-2021", "Shrove Monday"); lh.addStaticHoliday ("02-APR-2021", "Good Friday"); lh.addStaticHoliday ("05-APR-2021", "Easter Monday"); lh.addStaticHoliday ("13-MAY-2021", "Ascension Day"); lh.addStaticHoliday ("24-MAY-2021", "Whit Monday"); lh.addStaticHoliday ("03-JUN-2021", "Corpus Christi"); lh.addStaticHoliday ("01-NOV-2021", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2021", "Christmas Eve"); lh.addStaticHoliday ("31-DEC-2021", "New Years Eve"); lh.addStaticHoliday ("28-FEB-2022", "Shrove 
Monday"); lh.addStaticHoliday ("15-APR-2022", "Good Friday"); lh.addStaticHoliday ("18-APR-2022", "Easter Monday"); lh.addStaticHoliday ("26-MAY-2022", "Ascension Day"); lh.addStaticHoliday ("06-JUN-2022", "Whit Monday"); lh.addStaticHoliday ("16-JUN-2022", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2022", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2022", "All Saints Day"); lh.addStaticHoliday ("26-DEC-2022", "St. Stephens Day"); lh.addStaticHoliday ("20-FEB-2023", "Shrove Monday"); lh.addStaticHoliday ("07-APR-2023", "Good Friday"); lh.addStaticHoliday ("10-APR-2023", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2023", "Labour Day"); lh.addStaticHoliday ("18-MAY-2023", "Ascension Day"); lh.addStaticHoliday ("29-MAY-2023", "Whit Monday"); lh.addStaticHoliday ("08-JUN-2023", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2023", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2023", "All Saints Day"); lh.addStaticHoliday ("25-DEC-2023", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2023", "St. Stephens Day"); lh.addStaticHoliday ("01-JAN-2024", "New Years Day"); lh.addStaticHoliday ("12-FEB-2024", "Shrove Monday"); lh.addStaticHoliday ("29-MAR-2024", "Good Friday"); lh.addStaticHoliday ("01-APR-2024", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2024", "Labour Day"); lh.addStaticHoliday ("09-MAY-2024", "Ascension Day"); lh.addStaticHoliday ("20-MAY-2024", "Whit Monday"); lh.addStaticHoliday ("30-MAY-2024", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2024", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2024", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2024", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2024", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2024", "St. 
Stephens Day"); lh.addStaticHoliday ("31-DEC-2024", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2025", "New Years Day"); lh.addStaticHoliday ("03-MAR-2025", "Shrove Monday"); lh.addStaticHoliday ("18-APR-2025", "Good Friday"); lh.addStaticHoliday ("21-APR-2025", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2025", "Labour Day"); lh.addStaticHoliday ("29-MAY-2025", "Ascension Day"); lh.addStaticHoliday ("09-JUN-2025", "Whit Monday"); lh.addStaticHoliday ("19-JUN-2025", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2025", "Day of Unity"); lh.addStaticHoliday ("24-DEC-2025", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2025", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2025", "St. Stephens Day"); lh.addStaticHoliday ("31-DEC-2025", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2026", "New Years Day"); lh.addStaticHoliday ("16-FEB-2026", "Shrove Monday"); lh.addStaticHoliday ("03-APR-2026", "Good Friday"); lh.addStaticHoliday ("06-APR-2026", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2026", "Labour Day"); lh.addStaticHoliday ("14-MAY-2026", "Ascension Day"); lh.addStaticHoliday ("25-MAY-2026", "Whit Monday"); lh.addStaticHoliday ("04-JUN-2026", "Corpus Christi"); lh.addStaticHoliday ("24-DEC-2026", "Christmas Eve"); lh.addStaticHoliday ("25-DEC-2026", "Christmas Day"); lh.addStaticHoliday ("31-DEC-2026", "New Years Eve"); lh.addStaticHoliday ("01-JAN-2027", "New Years Day"); lh.addStaticHoliday ("08-FEB-2027", "Shrove Monday"); lh.addStaticHoliday ("26-MAR-2027", "Good Friday"); lh.addStaticHoliday ("29-MAR-2027", "Easter Monday"); lh.addStaticHoliday ("06-MAY-2027", "Ascension Day"); lh.addStaticHoliday ("17-MAY-2027", "Whit Monday"); lh.addStaticHoliday ("27-MAY-2027", "Corpus Christi"); lh.addStaticHoliday ("01-NOV-2027", "All Saints Day"); lh.addStaticHoliday ("24-DEC-2027", "Christmas Eve"); lh.addStaticHoliday ("31-DEC-2027", "New Years Eve"); lh.addStaticHoliday ("28-FEB-2028", "Shrove Monday"); lh.addStaticHoliday ("14-APR-2028", "Good Friday"); 
lh.addStaticHoliday ("17-APR-2028", "Easter Monday"); lh.addStaticHoliday ("01-MAY-2028", "Labour Day"); lh.addStaticHoliday ("25-MAY-2028", "Ascension Day"); lh.addStaticHoliday ("05-JUN-2028", "Whit Monday"); lh.addStaticHoliday ("15-JUN-2028", "Corpus Christi"); lh.addStaticHoliday ("03-OCT-2028", "Day of Unity"); lh.addStaticHoliday ("01-NOV-2028", "All Saints Day"); lh.addStaticHoliday ("25-DEC-2028", "Christmas Day"); lh.addStaticHoliday ("26-DEC-2028", "St. Stephens Day"); lh.addStandardWeekend(); return lh; } }
/* * JBoss, Home of Professional Open Source. * See the COPYRIGHT.txt file distributed with this work for information * regarding copyright ownership. Some portions may be licensed * to Red Hat, Inc. under one or more contributor license agreements. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. */ package org.teiid.resource.adapter.couchbase; import java.util.concurrent.TimeUnit; import javax.resource.ResourceException; import javax.resource.spi.InvalidPropertyException; import org.teiid.core.BundleUtil; import org.teiid.resource.spi.BasicConnectionFactory; import org.teiid.resource.spi.BasicManagedConnectionFactory; import com.couchbase.client.java.env.CouchbaseEnvironment; import com.couchbase.client.java.env.DefaultCouchbaseEnvironment; /** * Represents a managed connection factory instance for create {@code CouchbaseConnection}. 
* * @author kylin * */ public class CouchbaseManagedConnectionFactory extends BasicManagedConnectionFactory{ private static final long serialVersionUID = 8822399069779170119L; public static final BundleUtil UTIL = BundleUtil.getBundleUtil(CouchbaseManagedConnectionFactory.class); private Long managementTimeout = TimeUnit.SECONDS.toMillis(75); private Long queryTimeout = TimeUnit.SECONDS.toMillis(75); private Long viewTimeout = TimeUnit.SECONDS.toMillis(75); private Long kvTimeout = Long.valueOf(2500); private Long searchTimeout = TimeUnit.SECONDS.toMillis(75); private Long connectTimeout = TimeUnit.SECONDS.toMillis(5); private Boolean dnsSrvEnabled = false; /** * The connection string to identify the remote cluster */ private String connectionString = null; /** * The Keyspace/Bucket in Couchbase Server */ private String keyspace = null; private String namespace = null; /** * The Keyspace/Bucket password in Couchbase Server */ private String password = null; /** * Pair with connectTimeout, allowed value including MILLISECONDS, SECONDS, etc. 
*/ private String timeUnit = null; public Long getManagementTimeout() { return managementTimeout; } public void setManagementTimeout(Long managementTimeout) { this.managementTimeout = managementTimeout; } public Long getQueryTimeout() { return queryTimeout; } public void setQueryTimeout(Long queryTimeout) { this.queryTimeout = queryTimeout; } public Long getViewTimeout() { return viewTimeout; } public void setViewTimeout(Long viewTimeout) { this.viewTimeout = viewTimeout; } public Long getKvTimeout() { return kvTimeout; } public void setKvTimeout(Long kvTimeout) { this.kvTimeout = kvTimeout; } public Long getSearchTimeout() { return searchTimeout; } public void setSearchTimeout(Long searchTimeout) { this.searchTimeout = searchTimeout; } public Long getConnectTimeout() { return connectTimeout; } public void setConnectTimeout(Long connectTimeout) { this.connectTimeout = connectTimeout; } public Boolean getDnsSrvEnabled() { return dnsSrvEnabled; } public void setDnsSrvEnabled(Boolean dnsSrvEnabled) { this.dnsSrvEnabled = dnsSrvEnabled; } public String getConnectionString() { return connectionString; } public void setConnectionString(String connectionString) { this.connectionString = connectionString; } public String getKeyspace() { return keyspace; } public void setKeyspace(String keyspace) { this.keyspace = keyspace; } public String getNamespace() { return namespace; } public void setNamespace(String namespace) { this.namespace = namespace; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getTimeUnit() { return timeUnit; } public void setTimeUnit(String timeUnit) { this.timeUnit = timeUnit; } @SuppressWarnings("serial") @Override public BasicConnectionFactory<CouchbaseConnectionImpl> createConnectionFactory() throws ResourceException { final CouchbaseEnvironment environment = DefaultCouchbaseEnvironment.builder() .managementTimeout(managementTimeout) .queryTimeout(queryTimeout) 
.viewTimeout(viewTimeout) .kvTimeout(kvTimeout) .searchTimeout(searchTimeout) .connectTimeout(connectTimeout) .dnsSrvEnabled(dnsSrvEnabled) .build(); if (this.connectionString == null) { throw new InvalidPropertyException(UTIL.getString("no_server")); //$NON-NLS-1$ } if (this.keyspace == null) { throw new InvalidPropertyException(UTIL.getString("no_keyspace")); //$NON-NLS-1$ } if (this.namespace == null) { throw new InvalidPropertyException(UTIL.getString("no_namespace")); //$NON-NLS-1$ } TimeUnit unit = TimeUnit.MILLISECONDS; if(this.timeUnit != null) { try { unit = TimeUnit.valueOf(timeUnit); } catch (IllegalArgumentException e) { throw new InvalidPropertyException(UTIL.getString("invalid_timeUnit", timeUnit)); //$NON-NLS-1$ } } final TimeUnit timeoutUnit = unit; return new BasicConnectionFactory<CouchbaseConnectionImpl>(){ @Override public CouchbaseConnectionImpl getConnection() throws ResourceException { return new CouchbaseConnectionImpl(environment, connectionString, keyspace, password, timeoutUnit, namespace); }}; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + this.getManagementTimeout().hashCode(); result = prime * result + this.getConnectTimeout().hashCode(); result = prime * result + this.getQueryTimeout().hashCode(); result = prime * result + this.getViewTimeout().hashCode(); result = prime * result + this.getKvTimeout().hashCode(); result = prime * result + this.getSearchTimeout().hashCode(); result = prime * result + this.getDnsSrvEnabled().hashCode(); result = prime * result + this.getConnectionString().hashCode(); result = prime * result + ((this.keyspace == null) ? 0 : this.getKeyspace().hashCode()); result = prime * result + ((this.namespace == null) ? 0 : this.getNamespace().hashCode()); result = prime * result + ((this.password == null) ? 0 : this.getPassword().hashCode()); result = prime * result + ((this.timeUnit == null) ? 
0 : this.getTimeUnit().hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } CouchbaseManagedConnectionFactory other = (CouchbaseManagedConnectionFactory) obj; if(this.managementTimeout != other.managementTimeout){ return false; } if(this.queryTimeout != other.queryTimeout) { return false; } if(this.viewTimeout != other.viewTimeout) { return false; } if(this.kvTimeout != other.kvTimeout) { return false; } if(this.searchTimeout != other.searchTimeout) { return false; } if(this.connectTimeout != other.connectTimeout) { return false; } if(!this.dnsSrvEnabled.equals(other.dnsSrvEnabled)) { return false; } if (!checkEquals(this.connectionString, other.connectionString)) { return false; } if (!checkEquals(this.keyspace, other.keyspace)) { return false; } if (!checkEquals(this.namespace, other.namespace)) { return false; } if (!checkEquals(this.password, other.password)) { return false; } if (!checkEquals(this.timeUnit, other.timeUnit)) { return false; } return true; } }
/* Copyright 2007 Alin Dreghiciu.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ops4j.pax.web.service.internal;

import java.net.URL;
import java.util.Dictionary;
import java.util.EventListener;
import java.util.Iterator;
import java.util.List;

import javax.servlet.Filter;
import javax.servlet.MultipartConfigElement;
import javax.servlet.Servlet;
import javax.servlet.ServletContainerInitializer;
import javax.servlet.ServletException;

import org.ops4j.pax.web.service.SharedWebContainerContext;
import org.ops4j.pax.web.service.WebContainer;
import org.ops4j.pax.web.service.WebContainerDTO;
import org.ops4j.pax.web.service.whiteboard.WhiteboardElement;
import org.osgi.framework.Bundle;
import org.osgi.service.http.HttpContext;
import org.osgi.service.http.NamespaceException;
import org.osgi.service.http.runtime.dto.RequestInfoDTO;
import org.osgi.service.http.runtime.dto.RuntimeDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Terminal state of the HttpService lifecycle: every registration /
 * unregistration call is a no-op that only logs a warning, and every query
 * returns {@code null}. Instances replace a {@link HttpServiceStarted} once
 * the service has been stopped.
 */
class HttpServiceStopped implements StoppableHttpService {

	private static final Logger LOG = LoggerFactory
			.getLogger(HttpServiceStopped.class);

	/** Bundle the stopped service belonged to; may be null (no-arg ctor). */
	final Bundle serviceBundle;

	public HttpServiceStopped() {
		this(null);
	}

	HttpServiceStopped(HttpServiceStarted started) {
		// The no-arg constructor delegates with null; the original dereferenced
		// 'started' unconditionally and threw a NullPointerException there.
		serviceBundle = started == null ? null : started.serviceBundle;
		LOG.debug("Changing HttpService state to " + this);
	}

	/**
	 * Emits the single shared "already stopped" warning. Centralized so the
	 * message is identical everywhere (a few methods previously logged the
	 * misspelled variant "stoped").
	 */
	private void warnStopped() {
		LOG.warn("Http service has already been stopped");
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see org.osgi.service.http.HttpService#registerServlet(java.lang.String,
	 * javax.servlet.Servlet, java.util.Dictionary,
	 * org.osgi.service.http.HttpContext)
	 */
	@Override
	public void registerServlet(final String alias, final Servlet servlet,
			@SuppressWarnings("rawtypes") final Dictionary initParams,
			final HttpContext httpContext) throws ServletException,
			NamespaceException {
		warnStopped();
	}

	@Override
	public void registerServlet(String alias, Servlet servlet,
			@SuppressWarnings("rawtypes") Dictionary initParams,
			Integer loadOnStartup, Boolean asyncSupported,
			HttpContext httpContext) throws ServletException,
			NamespaceException {
		warnStopped();
	}

	@Override
	public void registerResources(final String alias, final String name,
			final HttpContext httpContext) throws NamespaceException {
		warnStopped();
	}

	@Override
	public void unregister(final String alias) {
		warnStopped();
	}

	@Override
	public HttpContext createDefaultHttpContext() {
		warnStopped();
		return null;
	}

	@Override
	public HttpContext createDefaultHttpContext(String contextID) {
		warnStopped();
		return null;
	}

	@Override
	public void stop() {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#registerServlet(Servlet, String[], Dictionary,
	 *      HttpContext)
	 */
	@Override
	public void registerServlet(final Servlet servlet,
			final String[] urlPatterns, final Dictionary<String, ?> initParams,
			final HttpContext httpContext) throws ServletException {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#registerServlet(javax.servlet.Servlet, String,
	 *      String[], java.util.Dictionary, org.osgi.service.http.HttpContext)
	 */
	@Override
	public void registerServlet(final Servlet servlet,
			final String servletName, final String[] urlPatterns,
			final Dictionary<String, ?> initParams,
			final HttpContext httpContext) throws ServletException {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#unregisterServlet(Servlet)
	 */
	@Override
	public void unregisterServlet(final Servlet servlet) {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#unregisterServlet(Servlet)
	 */
	@Override
	public void unregisterServlet(final String servletName) {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see org.ops4j.pax.web.service.WebContainer#registerServlet(java.lang.Class,
	 *      java.lang.String[], java.util.Dictionary,
	 *      org.osgi.service.http.HttpContext)
	 */
	@Override
	public void registerServlet(Class<? extends Servlet> servletClass,
			String[] urlPatterns, Dictionary<String, ?> initParams,
			HttpContext httpContext) throws ServletException {
		warnStopped();
	}

	@Override
	public void registerServlet(Servlet servlet, String[] urlPatterns,
			Dictionary<String, ?> initParams, Integer loadOnStartup,
			Boolean asyncSupported, HttpContext httpContext)
			throws ServletException {
		this.registerServlet(servlet, urlPatterns, initParams, httpContext);
	}

	@Override
	public void registerServlet(Servlet servlet, String servletName,
			String[] urlPatterns, Dictionary<String, ?> initParams,
			Integer loadOnStartup, Boolean asyncSupported,
			HttpContext httpContext) throws ServletException {
		this.registerServlet(servlet, servletName, urlPatterns, initParams,
				httpContext);
	}

	@Override
	public void registerServlet(Class<? extends Servlet> servletClass,
			String[] urlPatterns, Dictionary<String, ?> initParams,
			Integer loadOnStartup, Boolean asyncSupported,
			HttpContext httpContext) throws ServletException {
		this.registerServlet(servletClass, urlPatterns, initParams, httpContext);
	}

	/**
	 * Does nothing.
	 *
	 * @see org.ops4j.pax.web.service.WebContainer#unregisterServlets(java.lang.Class)
	 */
	@Override
	public void unregisterServlets(Class<? extends Servlet> servletClass) {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#registerEventListener(java.util.EventListener,
	 *      HttpContext)
	 */
	@Override
	public void registerEventListener(final EventListener listener,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * Does nothing.
	 *
	 * @see WebContainer#unregisterEventListener(java.util.EventListener)
	 */
	@Override
	public void unregisterEventListener(final EventListener listener) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerFilter(Filter, String[], String[], Dictionary,
	 *      HttpContext)
	 */
	@Override
	public void registerFilter(final Filter filter, final String[] urlPatterns,
			final String[] servletNames, final Dictionary<String, ?> initParams,
			final HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerFilter(Filter filter, String[] urlPatterns,
			String[] servletNames, Dictionary<String, String> initParams,
			Boolean asyncSupported, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerFilter(Class<? extends Filter> filterClass,
			String[] urlPatterns, String[] servletNames,
			Dictionary<String, String> initParams, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerFilter(Class<? extends Filter> filterClass,
			String[] urlPatterns, String[] servletNames,
			Dictionary<String, String> initParams, boolean asyncSupported,
			HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterFilter(Filter)
	 */
	@Override
	public void unregisterFilter(final Filter filter) {
		warnStopped();
	}

	@Override
	public void unregisterFilter(Class<? extends Filter> filterClass) {
		warnStopped();
	}

	@Override
	public void unregisterFilter(String filterName) {
		warnStopped();
	}

	/**
	 * @see WebContainer#setContextParam(Dictionary, HttpContext)
	 */
	@Override
	public void setContextParam(final Dictionary<String, ?> params,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * {@inheritDoc}
	 */
	@Override
	public void setSessionTimeout(final Integer minutes,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerJsps(String[], HttpContext)
	 */
	@Override
	public void registerJsps(final String[] urlPatterns,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerJsps(String[], Dictionary, HttpContext)
	 */
	@Override
	public void registerJsps(final String[] urlPatterns,
			final Dictionary<String, ?> initParams,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterJsps(HttpContext)
	 */
	@Override
	public void unregisterJsps(final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterJsps(HttpContext)
	 */
	@Override
	public void unregisterJsps(final String[] urlPatterns,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerErrorPage(String, String, HttpContext)
	 */
	@Override
	public void registerErrorPage(final String error, final String location,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterErrorPage(String, HttpContext)
	 */
	@Override
	public void unregisterErrorPage(final String error,
			final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerWelcomeFiles(String[], boolean, HttpContext)
	 */
	@Override
	public void registerWelcomeFiles(final String[] welcomeFiles,
			final boolean rediect, final HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterWelcomeFiles(String[], HttpContext)
	 */
	@Override
	public void unregisterWelcomeFiles(String[] welcomeFiles,
			HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerLoginConfig(String, String, String, String, HttpContext)
	 */
	@Override
	public void registerLoginConfig(String authMethod, String realmName,
			String formLoginPage, String formErrorPage, HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterLoginConfig(HttpContext)
	 */
	@Override
	public void unregisterLoginConfig(HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#registerConstraintMapping(java.lang.String,
	 *      java.lang.String, java.lang.String, java.lang.String, boolean,
	 *      java.util.List, org.osgi.service.http.HttpContext)
	 */
	@Override
	public void registerConstraintMapping(String constraintName,
			String mapping, String url, String dataConstraint,
			boolean authentication, List<String> roles, HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#unregisterConstraintMapping(HttpContext)
	 */
	@Override
	public void unregisterConstraintMapping(HttpContext httpContext) {
		warnStopped();
	}

	/**
	 * @see WebContainer#getDefaultSharedHttpContext()
	 */
	@Override
	public SharedWebContainerContext getDefaultSharedHttpContext() {
		warnStopped();
		return null;
	}

	@Override
	public void registerServletContainerInitializer(
			ServletContainerInitializer servletContainerInitializer,
			Class<?>[] classes, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void unregisterServletContainerInitializer(HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerJettyWebXml(URL jettyWebXmlURL, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerJspServlet(String[] urlPatterns,
			HttpContext httpContext, String jspFile) {
		warnStopped();
	}

	@Override
	public void registerJspServlet(String[] urlPatterns,
			Dictionary<String, ?> initParams, HttpContext httpContext,
			String jspFile) {
		warnStopped();
	}

	@Override
	public void setConnectorsAndVirtualHosts(List<String> connectors,
			List<String> virtualHosts, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void begin(HttpContext httpContext) {
		// intentionally a silent no-op (no warning), as in the started service
	}

	@Override
	public void end(HttpContext httpContext) {
		// intentionally a silent no-op (no warning)
	}

	@Override
	public SharedWebContainerContext createDefaultSharedHttpContext() {
		return null;
	}

	@Override
	public void registerServlet(Servlet servlet, String servletName,
			String[] urlPatterns, Dictionary<String, ?> initParams,
			Integer loadOnStartup, Boolean asyncSupported,
			MultipartConfigElement multiPartConfig, HttpContext httpContext)
			throws ServletException {
		// Delegates without servletName/multipart, as the original did; the
		// target only logs the stopped warning anyway.
		this.registerServlet(servlet, urlPatterns, initParams, httpContext);
	}

	@Override
	public void registerServlet(Class<? extends Servlet> servletClass,
			String[] urlPatterns, Dictionary<String, ?> initParams,
			Integer loadOnStartup, Boolean asyncSupported,
			MultipartConfigElement multiPartConfig, HttpContext httpContext)
			throws ServletException {
		this.registerServlet(servletClass, urlPatterns, initParams, httpContext);
	}

	@Override
	public void registerJspConfigTagLibs(String tagLibLocation,
			String tagLibUri, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerJspConfigPropertyGroup(List<String> includeCodes,
			List<String> includePreludes, List<String> urlPatterns,
			Boolean elIgnored, Boolean scriptingInvalid, Boolean isXml,
			HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void registerWebSocket(Object webSocket, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public void unregisterWebSocket(Object webSocket, HttpContext httpContext) {
		warnStopped();
	}

	@Override
	public RequestInfoDTO calculateRequestInfoDTO(String path,
			Iterator<WhiteboardElement> iterator) {
		warnStopped();
		// FIXME check if null valid
		return null;
	}

	@Override
	public RuntimeDTO createWhiteboardRuntimeDTO(
			Iterator<WhiteboardElement> iterator) {
		warnStopped();
		// FIXME check if null valid
		return null;
	}

	@Override
	public String toString() {
		if (serviceBundle == null) {
			return super.toString();
		} else {
			return super.toString() + " for bundle " + serviceBundle;
		}
	}

	@Override
	public WebContainerDTO getWebcontainerDTO() {
		warnStopped();
		return null;
	}
}
package at.ac.tuwien.dsg.pm.dao;

import at.ac.tuwien.dsg.pm.exceptions.CollectiveAlreadyExistsException;
import at.ac.tuwien.dsg.pm.model.Collective;
import at.ac.tuwien.dsg.smartcom.model.DeliveryPolicy;
import at.ac.tuwien.dsg.smartcom.utils.MongoDBInstance;
import com.mongodb.MongoClient;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.List;

import static org.junit.Assert.*;

/**
 * Integration tests for {@code MongoDBCollectiveDAO} against an embedded
 * MongoDB instance ({@link MongoDBInstance}) listening on localhost:12345.
 * A fresh database ("TEST", collection "PEER") is created before each test
 * and torn down afterwards, so tests are isolated from each other.
 */
public class MongoDBCollectiveDAOTest {

    // embedded MongoDB lifecycle manager; started in setUp, stopped in tearDown
    private MongoDBInstance mongoDB;
    private MongoDBCollectiveDAO dao;

    @Before
    public void setUp() throws Exception {
        mongoDB = new MongoDBInstance();
        mongoDB.setUp();
        // port 12345 is presumably the embedded instance's fixed port — TODO confirm against MongoDBInstance
        MongoClient mongo = new MongoClient("localhost", 12345);
        dao = new MongoDBCollectiveDAO(mongo, "TEST", "PEER");
    }

    @After
    public void tearDown() throws Exception {
        mongoDB.tearDown();
    }

    /** Inserted collectives can each be fetched back by their id. */
    @Test
    public void testAddCollective() throws Exception {
        Collective coll1 = createCollective("coll1");
        Collective coll2 = createCollective("coll2");
        Collective coll3 = createCollective("coll3");
        Collective coll4 = createCollective("coll4");
        Collective coll5 = createCollective("coll5");

        coll1 = dao.addCollective(coll1);
        coll2 = dao.addCollective(coll2);
        coll3 = dao.addCollective(coll3);
        coll4 = dao.addCollective(coll4);
        coll5 = dao.addCollective(coll5);

        assertEquals(coll1, dao.getCollective("coll1"));
        assertEquals(coll2, dao.getCollective("coll2"));
        assertEquals(coll3, dao.getCollective("coll3"));
        assertEquals(coll4, dao.getCollective("coll4"));
        assertEquals(coll5, dao.getCollective("coll5"));
    }

    /** Re-inserting an existing id must raise CollectiveAlreadyExistsException. */
    @Test(expected = CollectiveAlreadyExistsException.class)
    public void testAddCollective_duplicateKey() throws Exception {
        Collective coll1 = createCollective("coll1");
        Collective coll2 = createCollective("coll2");
        Collective coll3 = createCollective("coll3");
        Collective coll4 = createCollective("coll4");
        Collective coll5 = createCollective("coll5");

        coll1 = dao.addCollective(coll1);
        coll2 = dao.addCollective(coll2);
        coll3 = dao.addCollective(coll3);
        coll4 = dao.addCollective(coll4);
        coll5 = dao.addCollective(coll5);

        assertEquals(coll1, dao.getCollective("coll1"));
        assertEquals(coll2, dao.getCollective("coll2"));
        assertEquals(coll3, dao.getCollective("coll3"));
        assertEquals(coll4, dao.getCollective("coll4"));
        assertEquals(coll5, dao.getCollective("coll5"));

        // duplicate id "coll3" — expected to throw (see @Test annotation)
        dao.addCollective(createCollective("coll3"));
    }

    /** Adding a collective without an id must have the DAO generate a non-empty one. */
    @Test
    public void testAddCollective_keyGeneration() throws Exception {
        Collective coll1 = createCollective("coll1");
        Collective coll2 = createCollective("coll2");
        Collective coll3 = createCollective("coll3");
        Collective coll4 = createCollective("coll4");
        Collective coll5 = createCollective("coll5");

        coll1 = dao.addCollective(coll1);
        coll2 = dao.addCollective(coll2);
        coll3 = dao.addCollective(coll3);
        coll4 = dao.addCollective(coll4);
        coll5 = dao.addCollective(coll5);

        assertEquals(coll1, dao.getCollective("coll1"));
        assertEquals(coll2, dao.getCollective("coll2"));
        assertEquals(coll3, dao.getCollective("coll3"));
        assertEquals(coll4, dao.getCollective("coll4"));
        assertEquals(coll5, dao.getCollective("coll5"));

        // no id set here: the DAO is expected to assign one on insert
        Collective coll = new Collective();
        coll.setDeliveryPolicy(DeliveryPolicy.Collective.TO_ANY);
        assertNull(coll.getId());
        coll = dao.addCollective(coll);
        assertNotNull(coll.getId());
        assertThat(coll.getId(), Matchers.not(Matchers.isEmptyString()));
    }

    /** Lookup of a missing id yields null; after insert it yields the entity. */
    @Test
    public void testGetCollective() throws Exception {
        assertNull(dao.getCollective("coll1"));

        Collective coll1 = createCollective("coll1");
        coll1 = dao.addCollective(coll1);

        assertEquals(coll1, dao.getCollective("coll1"));
    }

    /** getAll returns every stored collective, in insertion order (Matchers.contains is order-sensitive). */
    @Test
    public void testGetAll() throws Exception {
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        List<Collective> all = dao.getAll();
        assertThat(all, Matchers.contains(coll1, coll2, coll3, coll4, coll5));
    }

    /** Updating a collective persists the change and returns the updated state. */
    @Test
    public void testUpdateCollective() throws Exception {
        // coll1/coll2/coll5 only seed the collection; not asserted on below
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        coll3.setDeliveryPolicy(DeliveryPolicy.Collective.TO_ANY);
        coll3 = dao.updateCollective(coll3);
        assertEquals(DeliveryPolicy.Collective.TO_ANY, coll3.getDeliveryPolicy());

        Collective actual = dao.getCollective(coll3.getId());
        assertEquals(coll3, actual);

        coll4.addPeer("asdf");
        // NOTE(review): updateCollective is called with coll3 here, not coll4 —
        // looks like a copy-paste slip: the "asdf" peer added to coll4 above is
        // never persisted or asserted. The assertions below are consistent with
        // coll3 (no peers), so the test passes as written — confirm intent.
        coll4 = dao.updateCollective(coll3);
        assertThat(coll4.getPeers(), Matchers.hasSize(0));

        actual = dao.getCollective(coll3.getId());
        assertEquals(coll4, actual);
    }

    /** addPeerToCollective appends peers, is idempotent per peer, and scopes peers per collective. */
    @Test
    public void testAddPeerToCollective() throws Exception {
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        Collective actual = dao.addPeerToCollective(coll1.getId(), "peer1");
        assertThat(actual.getPeers(), Matchers.contains("peer1"));

        actual = dao.addPeerToCollective(coll1.getId(), "peer2");
        assertThat(actual.getPeers(), Matchers.contains("peer1", "peer2"));

        // adding the same peer twice must not create a duplicate entry
        actual = dao.addPeerToCollective(coll1.getId(), "peer2");
        assertThat(actual.getPeers(), Matchers.hasSize(2));
        assertThat(actual.getPeers(), Matchers.contains("peer1", "peer2"));

        // the same peer id may belong to a different collective independently
        Collective actual2 = dao.addPeerToCollective(coll2.getId(), "peer2");
        assertThat(actual2.getPeers(), Matchers.hasSize(1));
        assertThat(actual2.getPeers(), Matchers.contains("peer2"));

        actual = dao.getCollective("coll1");
        assertThat(actual.getPeers(), Matchers.hasSize(2));
        assertThat(actual.getPeers(), Matchers.contains("peer1", "peer2"));
    }

    /** removePeerToCollective removes only the named peer; removing a non-member is a no-op. */
    @Test
    public void testRemovePeerToCollective() throws Exception {
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        Collective actual = dao.addPeerToCollective(coll1.getId(), "peer1");
        assertThat(actual.getPeers(), Matchers.contains("peer1"));

        actual = dao.addPeerToCollective(coll1.getId(), "peer2");
        assertThat(actual.getPeers(), Matchers.contains("peer1", "peer2"));

        // removing a peer that was never added must leave membership unchanged
        actual = dao.removePeerToCollective(coll1.getId(), "peer3");
        assertThat(actual.getPeers(), Matchers.contains("peer1", "peer2"));

        actual = dao.removePeerToCollective(coll1.getId(), "peer1");
        assertThat(actual.getPeers(), Matchers.contains("peer2"));
        assertThat(actual.getPeers(), Matchers.not(Matchers.contains("peer1")));

        actual = dao.getCollective(coll1.getId());
        assertThat(actual.getPeers(), Matchers.contains("peer2"));
        assertThat(actual.getPeers(), Matchers.not(Matchers.contains("peer1")));

        actual = dao.removePeerToCollective(coll1.getId(), "peer2");
        assertThat(actual.getPeers(), Matchers.not(Matchers.contains("peer2")));

        actual = dao.removePeerToCollective(coll1.getId(), "peer3");
        assertThat(actual.getPeers(), Matchers.not(Matchers.contains("peer2")));
    }

    /** Deleting a collective makes subsequent lookups return null. */
    @Test
    public void testDeleteCollective() throws Exception {
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        assertNotNull(dao.getCollective(coll2.getId()));
        dao.deleteCollective(coll2.getId());
        assertNull(dao.getCollective(coll2.getId()));
    }

    /** clearData wipes the entire collection. */
    @Test
    public void testClearData() throws Exception {
        Collective coll1 = createAndAddCollective("coll1");
        Collective coll2 = createAndAddCollective("coll2");
        Collective coll3 = createAndAddCollective("coll3");
        Collective coll4 = createAndAddCollective("coll4");
        Collective coll5 = createAndAddCollective("coll5");

        assertThat(dao.getAll(), Matchers.contains(coll1, coll2, coll3, coll4, coll5));
        dao.clearData();
        assertThat(dao.getAll(), Matchers.empty());
    }

    // convenience: build a collective with the given id and insert it
    private Collective createAndAddCollective(String id) throws CollectiveAlreadyExistsException {
        return dao.addCollective(createCollective(id));
    }

    // convenience: build (but do not insert) a collective with TO_ALL_MEMBERS policy
    private Collective createCollective(String id) {
        Collective coll = new Collective();
        coll.setId(id);
        coll.setDeliveryPolicy(DeliveryPolicy.Collective.TO_ALL_MEMBERS);
        return coll;
    }
}