gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 */
package com.feicent.zhang.project.springside.concurrent.jsr166e;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicLong;

/**
 * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/jsr166e/LongAdder.java 1.17
 *
 * One or more variables that together maintain an initially zero
 * {@code long} sum. When updates (method {@link #add}) are contended
 * across threads, the set of variables may grow dynamically to reduce
 * contention. Method {@link #sum} (or, equivalently, {@link
 * #longValue}) returns the current total combined across the
 * variables maintaining the sum.
 *
 * <p>This class is usually preferable to {@link AtomicLong} when
 * multiple threads update a common sum that is used for purposes such
 * as collecting statistics, not for fine-grained synchronization
 * control. Under low update contention, the two classes have similar
 * characteristics. But under high contention, expected throughput of
 * this class is significantly higher, at the expense of higher space
 * consumption.
 *
 * <p>This class extends {@link Number}, but does <em>not</em> define
 * methods such as {@code equals}, {@code hashCode} and {@code
 * compareTo} because instances are expected to be mutated, and so are
 * not useful as collection keys.
 *
 * <p><em>jsr166e note: This class is targeted to be placed in
 * java.util.concurrent.atomic.</em>
 *
 * <p>NOTE(review): the striped state ({@code base}, {@code cells},
 * {@code busy}, {@code threadHashCode}, {@code casBase},
 * {@code retryUpdate}, {@code internalReset}) lives in the superclass
 * {@code Striped64}, which is not visible in this file.
 *
 * @since 1.8
 * @author Doug Lea
 */
public class LongAdder extends Striped64 implements Serializable {
    private static final long serialVersionUID = 7249069246863182397L;

    /**
     * Version of plus for use in retryUpdate.
     * Striped64 calls back into this to combine a cell value with the delta.
     */
    final long fn(long v, long x) { return v + x; }

    /**
     * Creates a new adder with initial sum of zero.
     */
    public LongAdder() {
    }

    /**
     * Adds the given value.
     *
     * <p>Fast path: if no cell table exists, try a single CAS on the
     * shared {@code base}. On contention (CAS failure or an existing
     * cell table), try the thread's hashed cell; if that also fails,
     * fall through to the slow path in {@code Striped64.retryUpdate}.
     *
     * @param x the value to add
     */
    public void add(long x) {
        Cell[] as; long b, v; int[] hc; Cell a; int n;
        if ((as = cells) != null || !casBase(b = base, b + x)) {
            // uncontended stays true unless the per-thread cell CAS below fails
            boolean uncontended = true;
            if ((hc = threadHashCode.get()) == null ||
                as == null || (n = as.length) < 1 ||
                // (n - 1) & hc[0] maps the thread hash onto the power-of-two table
                (a = as[(n - 1) & hc[0]]) == null ||
                !(uncontended = a.cas(v = a.value, v + x)))
                retryUpdate(x, hc, uncontended);
        }
    }

    /**
     * Equivalent to {@code add(1)}.
     */
    public void increment() {
        add(1L);
    }

    /**
     * Equivalent to {@code add(-1)}.
     */
    public void decrement() {
        add(-1L);
    }

    /**
     * Returns the current sum. The returned value is <em>NOT</em> an
     * atomic snapshot; invocation in the absence of concurrent
     * updates returns an accurate result, but concurrent updates that
     * occur while the sum is being calculated might not be
     * incorporated.
     *
     * @return the sum
     */
    public long sum() {
        // Deliberately racy: base and each cell are read independently.
        long sum = base;
        Cell[] as = cells;
        if (as != null) {
            int n = as.length;
            for (int i = 0; i < n; ++i) {
                Cell a = as[i];
                if (a != null)
                    sum += a.value;
            }
        }
        return sum;
    }

    /**
     * Resets variables maintaining the sum to zero. This method may
     * be a useful alternative to creating a new adder, but is only
     * effective if there are no concurrent updates. Because this
     * method is intrinsically racy, it should only be used when it is
     * known that no threads are concurrently updating.
     */
    public void reset() {
        internalReset(0L);
    }

    /**
     * Equivalent in effect to {@link #sum} followed by {@link
     * #reset}. This method may apply for example during quiescent
     * points between multithreaded computations. If there are
     * updates concurrent with this method, the returned value is
     * <em>not</em> guaranteed to be the final value occurring before
     * the reset.
     *
     * @return the sum
     */
    public long sumThenReset() {
        long sum = base;
        Cell[] as = cells;
        // Plain (non-CAS) writes: correctness relies on quiescence, see Javadoc.
        base = 0L;
        if (as != null) {
            int n = as.length;
            for (int i = 0; i < n; ++i) {
                Cell a = as[i];
                if (a != null) {
                    sum += a.value;
                    a.value = 0L;
                }
            }
        }
        return sum;
    }

    /**
     * Returns the String representation of the {@link #sum}.
     * @return the String representation of the {@link #sum}
     */
    public String toString() {
        return Long.toString(sum());
    }

    /**
     * Equivalent to {@link #sum}.
     *
     * @return the sum
     */
    public long longValue() {
        return sum();
    }

    /**
     * Returns the {@link #sum} as an {@code int} after a narrowing
     * primitive conversion.
     */
    public int intValue() {
        return (int)sum();
    }

    /**
     * Returns the {@link #sum} as a {@code float}
     * after a widening primitive conversion.
     */
    public float floatValue() {
        return (float)sum();
    }

    /**
     * Returns the {@link #sum} as a {@code double} after a widening
     * primitive conversion.
     */
    public double doubleValue() {
        return (double)sum();
    }

    /**
     * Serializes only the combined sum, not the cell table.
     */
    private void writeObject(ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        s.writeLong(sum());
    }

    /**
     * Restores the adder with the serialized sum folded into {@code base};
     * the striped state is reinitialized to its unexpanded form.
     */
    private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        busy = 0;
        cells = null;
        base = s.readLong();
    }
}
/*
 * RED5 Open Source Media Server - https://github.com/Red5/ Copyright 2006-2016 by respective authors (see below). All rights reserved. Licensed under the Apache License, Version
 * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless
 * required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied. See the License for the specific language governing permissions and limitations under the License.
 */
package org.red5.server.stream.consumer;

import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.mina.core.buffer.IoBuffer;
import org.red5.server.api.stream.IClientStream;
import org.red5.server.messaging.IMessage;
import org.red5.server.messaging.IMessageComponent;
import org.red5.server.messaging.IPipe;
import org.red5.server.messaging.IPipeConnectionListener;
import org.red5.server.messaging.IPushableConsumer;
import org.red5.server.messaging.OOBControlMessage;
import org.red5.server.messaging.PipeConnectionEvent;
import org.red5.server.net.rtmp.Channel;
import org.red5.server.net.rtmp.RTMPConnection;
import org.red5.server.net.rtmp.event.AudioData;
import org.red5.server.net.rtmp.event.BaseEvent;
import org.red5.server.net.rtmp.event.BytesRead;
import org.red5.server.net.rtmp.event.ChunkSize;
import org.red5.server.net.rtmp.event.FlexStreamSend;
import org.red5.server.net.rtmp.event.IRTMPEvent;
import org.red5.server.net.rtmp.event.Notify;
import org.red5.server.net.rtmp.event.Ping;
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.net.rtmp.message.Constants;
import org.red5.server.net.rtmp.message.Header;
import org.red5.server.stream.message.RTMPMessage;
import org.red5.server.stream.message.ResetMessage;
import org.red5.server.stream.message.StatusMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * RTMP connection consumer.
 *
 * <p>Receives pipe messages and routes RTMP events to the appropriate
 * channel (video, audio, data) of the backing {@link RTMPConnection}.
 */
public class ConnectionConsumer implements IPushableConsumer, IPipeConnectionListener {

    private static final Logger log = LoggerFactory.getLogger(ConnectionConsumer.class);

    /**
     * Connection consumer class name
     */
    public static final String KEY = ConnectionConsumer.class.getName();

    /**
     * Connection object
     */
    private RTMPConnection conn;

    /**
     * Video channel
     */
    private Channel video;

    /**
     * Audio channel
     */
    private Channel audio;

    /**
     * Data channel
     */
    private Channel data;

    /**
     * Chunk size. Packets are sent chunk-by-chunk.
     */
    private int chunkSize = 1024; //TODO: Not sure of the best value here

    /**
     * Whether or not the chunk size has been sent. This seems to be required for h264.
     */
    private AtomicBoolean chunkSizeSent = new AtomicBoolean(false);

    /**
     * Create RTMP connection consumer for given connection and channels.
     *
     * @param conn
     *            RTMP connection
     * @param videoChannel
     *            Video channel
     * @param audioChannel
     *            Audio channel
     * @param dataChannel
     *            Data channel
     */
    public ConnectionConsumer(RTMPConnection conn, Channel videoChannel, Channel audioChannel, Channel dataChannel) {
        log.debug("Channel ids - video: {} audio: {} data: {}", new Object[] { videoChannel, audioChannel, dataChannel });
        this.conn = conn;
        this.video = videoChannel;
        this.audio = audioChannel;
        this.data = dataChannel;
    }

    /**
     * Create connection consumer without an RTMP connection.
     *
     * <p>NOTE(review): with this constructor {@code conn} stays null, so message
     * types that go through the connection (ping, bytes-read, chunk size) will
     * NPE — presumably callers using this form only push A/V/data events.
     *
     * @param videoChannel
     *            video channel
     * @param audioChannel
     *            audio channel
     * @param dataChannel
     *            data channel
     */
    public ConnectionConsumer(Channel videoChannel, Channel audioChannel, Channel dataChannel) {
        this.video = videoChannel;
        this.audio = audioChannel;
        this.data = dataChannel;
    }

    /** {@inheritDoc} */
    public void pushMessage(IPipe pipe, IMessage message) {
        //log.trace("pushMessage - type: {}", message.getMessageType());
        if (message instanceof ResetMessage) {
            //ignore
        } else if (message instanceof StatusMessage) {
            StatusMessage statusMsg = (StatusMessage) message;
            data.sendStatus(statusMsg.getBody());
        } else if (message instanceof RTMPMessage) {
            // make sure chunk size has been sent
            sendChunkSize();
            // cast to rtmp message
            RTMPMessage rtmpMsg = (RTMPMessage) message;
            IRTMPEvent msg = rtmpMsg.getBody();
            // get timestamp
            int eventTime = msg.getTimestamp();
            log.debug("Message timestamp: {}", eventTime);
            if (eventTime < 0) {
                // everyone seems to prefer positive timestamps.
                // BUGFIX: the previous expression `eventTime += (eventTime * -1)` always
                // produced 0 instead of the positive magnitude, and the log call passed
                // two arguments to a single placeholder (printing Integer.MIN_VALUE
                // instead of the timestamp).
                eventTime = -eventTime;
                log.debug("Message has negative timestamp, flipping it to positive: {}", eventTime);
                msg.setTimestamp(eventTime);
            }
            // get the data type
            byte dataType = msg.getDataType();
            if (log.isTraceEnabled()) {
                log.trace("Data type: {} source type: {}", dataType, ((BaseEvent) msg).getSourceType());
            }
            // create a new header for the consumer if the message.body doesnt already have one
            final Header header = Optional.ofNullable(msg.getHeader()).orElse(new Header());
            // XXX sets the timerbase, but should we do this if there's already a timerbase?
            header.setTimerBase(eventTime);
            // data buffer
            IoBuffer buf = null;
            switch (dataType) {
                case Constants.TYPE_AGGREGATE:
                    //log.trace("Aggregate data");
                    data.write(msg);
                    break;
                case Constants.TYPE_AUDIO_DATA:
                    //log.trace("Audio data");
                    buf = ((AudioData) msg).getData();
                    if (buf != null) {
                        // wrap in a read-only view so the original buffer position is untouched
                        AudioData audioData = new AudioData(buf.asReadOnlyBuffer());
                        audioData.setHeader(header);
                        audioData.setTimestamp(header.getTimer());
                        audioData.setSourceType(((AudioData) msg).getSourceType());
                        audio.write(audioData);
                    } else {
                        log.warn("Audio data was not found");
                    }
                    break;
                case Constants.TYPE_VIDEO_DATA:
                    //log.trace("Video data");
                    buf = ((VideoData) msg).getData();
                    if (buf != null) {
                        VideoData videoData = new VideoData(buf.asReadOnlyBuffer());
                        videoData.setHeader(header);
                        videoData.setTimestamp(header.getTimer());
                        videoData.setSourceType(((VideoData) msg).getSourceType());
                        video.write(videoData);
                    } else {
                        log.warn("Video data was not found");
                    }
                    break;
                case Constants.TYPE_PING:
                    //log.trace("Ping");
                    Ping ping = (Ping) msg;
                    ping.setHeader(header);
                    conn.ping(ping);
                    break;
                case Constants.TYPE_STREAM_METADATA:
                    if (log.isTraceEnabled()) {
                        log.trace("Meta data: {}", (Notify) msg);
                    }
                    //Notify notify = new Notify(((Notify) msg).getData().asReadOnlyBuffer());
                    Notify notify = (Notify) msg;
                    notify.setHeader(header);
                    notify.setTimestamp(header.getTimer());
                    data.write(notify);
                    break;
                case Constants.TYPE_FLEX_STREAM_SEND:
                    //if (log.isTraceEnabled()) {
                    //log.trace("Flex stream send: {}", (Notify) msg);
                    //}
                    FlexStreamSend send = null;
                    if (msg instanceof FlexStreamSend) {
                        send = (FlexStreamSend) msg;
                    } else {
                        send = new FlexStreamSend(((Notify) msg).getData().asReadOnlyBuffer());
                    }
                    send.setHeader(header);
                    send.setTimestamp(header.getTimer());
                    data.write(send);
                    break;
                case Constants.TYPE_BYTES_READ:
                    //log.trace("Bytes read");
                    BytesRead bytesRead = (BytesRead) msg;
                    bytesRead.setHeader(header);
                    bytesRead.setTimestamp(header.getTimer());
                    // channel 2 is the protocol control channel
                    conn.getChannel((byte) 2).write(bytesRead);
                    break;
                default:
                    //log.trace("Default: {}", dataType);
                    data.write(msg);
            }
        } else {
            log.debug("Unhandled push message: {}", message);
            if (log.isTraceEnabled()) {
                Class<? extends IMessage> clazz = message.getClass();
                log.trace("Class info - name: {} declaring: {} enclosing: {}", new Object[] { clazz.getName(), clazz.getDeclaringClass(), clazz.getEnclosingClass() });
            }
        }
    }

    /** {@inheritDoc} */
    public void onPipeConnectionEvent(PipeConnectionEvent event) {
        if (event.getType().equals(PipeConnectionEvent.EventType.PROVIDER_DISCONNECT)) {
            closeChannels();
        }
    }

    /** {@inheritDoc} */
    public void onOOBControlMessage(IMessageComponent source, IPipe pipe, OOBControlMessage oobCtrlMsg) {
        if ("ConnectionConsumer".equals(oobCtrlMsg.getTarget())) {
            String serviceName = oobCtrlMsg.getServiceName();
            log.trace("Service name: {}", serviceName);
            if ("pendingCount".equals(serviceName)) {
                oobCtrlMsg.setResult(conn.getPendingMessages());
            } else if ("pendingVideoCount".equals(serviceName)) {
                IClientStream stream = conn.getStreamByChannelId(video.getId());
                if (stream != null) {
                    oobCtrlMsg.setResult(conn.getPendingVideoMessages(stream.getStreamId()));
                } else {
                    oobCtrlMsg.setResult(0L);
                }
            } else if ("writeDelta".equals(serviceName)) {
                //TODO: Revisit the max stream value later
                long maxStream = 120 * 1024;
                // Return the current delta between sent bytes and bytes the client
                // reported to have received, and the interval the client should use
                // for generating BytesRead messages (half of the allowed bandwidth).
                oobCtrlMsg.setResult(new Long[] { conn.getWrittenBytes() - conn.getClientBytesRead(), maxStream / 2 });
            } else if ("chunkSize".equals(serviceName)) {
                int newSize = (Integer) oobCtrlMsg.getServiceParamMap().get("chunkSize");
                if (newSize != chunkSize) {
                    chunkSize = newSize;
                    // force a re-send of the chunk size on the next opportunity
                    chunkSizeSent.set(false);
                    sendChunkSize();
                }
            }
        }
    }

    /**
     * Send the chunk size exactly once per value (guarded by compareAndSet).
     */
    private void sendChunkSize() {
        if (chunkSizeSent.compareAndSet(false, true)) {
            log.debug("Sending chunk size: {}", chunkSize);
            ChunkSize chunkSizeMsg = new ChunkSize(chunkSize);
            conn.getChannel((byte) 2).write(chunkSizeMsg);
        }
    }

    /**
     * Close all the channels
     */
    private void closeChannels() {
        conn.closeChannel(video.getId());
        conn.closeChannel(audio.getId());
        conn.closeChannel(data.getId());
    }
}
/*
 Copyright 2001,2003  The Apache Software Foundation

 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing, software
 distributed under the License is distributed on an "AS IS" BASIS,
 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 */
package org.apache.batik.svggen;

import java.awt.Rectangle;
import java.awt.image.BufferedImageOp;
import java.awt.image.ByteLookupTable;
import java.awt.image.LookupOp;
import java.awt.image.LookupTable;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

/**
 * Utility class that converts a LookupOp object into
 * an SVG filter descriptor. The SVG filter corresponding
 * to a LookupOp is an feComponentTransfer, with a type
 * set to 'table', the tableValues set to the content
 * of the lookup table.
 *
 * @author <a href="mailto:vincent.hardy@eng.sun.com">Vincent Hardy</a>
 * @version $Id$
 * @see org.apache.batik.svggen.SVGBufferedImageOp
 */
public class SVGLookupOp extends AbstractSVGFilterConverter {

    /**
     * Gamma for linear to sRGB conversion
     */
    private static final double GAMMA = 1./2.4;

    /**
     * Lookup tables for linear to sRGB value
     * forward and backward mapping
     */
    private static final int linearToSRGBLut[] = new int[256];
    private static final int sRGBToLinear[] = new int[256];

    static {
        for (int i=0; i<256; i++) {
            // linear to sRGB (IEC 61966-2-1 transfer curve)
            float value = i/255f;
            if (value <= 0.0031308) {
                value *= 12.92f;
            } else {
                value = 1.055f * ((float) Math.pow(value, GAMMA)) - 0.055f;
            }
            linearToSRGBLut[i] = Math.round(value*255);

            // sRGB to linear (inverse of the above)
            value = i/255f;
            if (value <= 0.04045) {
                value /= 12.92f;
            } else {
                value = (float)Math.pow((value + 0.055f)/1.055f, 1/GAMMA);
            }
            sRGBToLinear[i] = Math.round(value*255);
        }
    }

    /**
     * @param generatorContext used to build Elements
     */
    public SVGLookupOp(SVGGeneratorContext generatorContext) {
        super(generatorContext);
    }

    /**
     * Converts a Java 2D API BufferedImageOp into
     * a set of attribute/value pairs and related definitions
     *
     * @param filter BufferedImageOp filter to be converted
     * @param filterRect Rectangle, in device space, that defines the area
     *        to which filtering applies. May be null, meaning that the
     *        area is undefined.
     * @return descriptor of the attributes required to represent
     *         the input filter, or null if the filter is not a LookupOp
     * @see org.apache.batik.svggen.SVGFilterDescriptor
     */
    public SVGFilterDescriptor toSVG(BufferedImageOp filter, Rectangle filterRect) {
        if (filter instanceof LookupOp)
            return toSVG((LookupOp)filter);
        else
            return null;
    }

    /**
     * @param lookupOp the LookupOp to be converted
     * @return a description of the SVG filter corresponding to
     *         lookupOp. The definition of the feComponentTransfer
     *         filter is put in feComponentTransferDefSet
     */
    public SVGFilterDescriptor toSVG(LookupOp lookupOp) {
        // Reuse definition if lookupOp has already been converted
        SVGFilterDescriptor filterDesc =
            (SVGFilterDescriptor)descMap.get(lookupOp);

        Document domFactory = generatorContext.domFactory;

        if (filterDesc == null) {
            //
            // First time filter is converted: create its corresponding
            // SVG filter
            //
            Element filterDef = domFactory.createElementNS(SVG_NAMESPACE_URI,
                                                           SVG_FILTER_TAG);
            Element feComponentTransferDef =
                domFactory.createElementNS(SVG_NAMESPACE_URI,
                                           SVG_FE_COMPONENT_TRANSFER_TAG);

            // Append transfer function for each component, setting
            // the attributes corresponding to the scale and offset.
            // Because we are using a LookupOp as a BufferedImageOp,
            // the number of lookup table must be:
            // + 1, in which case the same lookup is applied to the
            //   Red, Green and Blue components,
            // + 3, in which case the lookup tables apply to the
            //   Red, Green and Blue components
            // + 4, in which case the lookup tables apply to the
            //   Red, Green, Blue and Alpha components
            String lookupTables[] = convertLookupTables(lookupOp);

            Element feFuncR = domFactory.createElementNS(SVG_NAMESPACE_URI,
                                                         SVG_FE_FUNC_R_TAG);
            Element feFuncG = domFactory.createElementNS(SVG_NAMESPACE_URI,
                                                         SVG_FE_FUNC_G_TAG);
            Element feFuncB = domFactory.createElementNS(SVG_NAMESPACE_URI,
                                                         SVG_FE_FUNC_B_TAG);
            Element feFuncA = null;
            String type = SVG_TABLE_VALUE;

            if (lookupTables.length == 1) {
                // single table: same transfer applied to R, G and B
                feFuncR.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncG.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncB.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncR.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[0]);
                feFuncG.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[0]);
                feFuncB.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[0]);
            } else if (lookupTables.length >= 3) {
                // per-component tables; index 3 (if present) is alpha
                feFuncR.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncG.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncB.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                feFuncR.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[0]);
                feFuncG.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[1]);
                feFuncB.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                       lookupTables[2]);

                if (lookupTables.length == 4) {
                    feFuncA = domFactory.createElementNS(SVG_NAMESPACE_URI,
                                                         SVG_FE_FUNC_A_TAG);
                    feFuncA.setAttributeNS(null, SVG_TYPE_ATTRIBUTE, type);
                    feFuncA.setAttributeNS(null, SVG_TABLE_VALUES_ATTRIBUTE,
                                           lookupTables[3]);
                }
            }

            feComponentTransferDef.appendChild(feFuncR);
            feComponentTransferDef.appendChild(feFuncG);
            feComponentTransferDef.appendChild(feFuncB);

            if (feFuncA != null)
                feComponentTransferDef.appendChild(feFuncA);

            filterDef.appendChild(feComponentTransferDef);

            filterDef.
                setAttributeNS(null, SVG_ID_ATTRIBUTE,
                               generatorContext.idGenerator.
                               generateID(ID_PREFIX_FE_COMPONENT_TRANSFER));

            //
            // Create a filter descriptor
            //

            // Process filter attribute
            StringBuffer filterAttrBuf = new StringBuffer(URL_PREFIX);
            filterAttrBuf.append(SIGN_POUND);
            filterAttrBuf.append(filterDef.getAttributeNS(null,
                                                          SVG_ID_ATTRIBUTE));
            filterAttrBuf.append(URL_SUFFIX);

            filterDesc = new SVGFilterDescriptor(filterAttrBuf.toString(),
                                                 filterDef);

            defSet.add(filterDef);
            descMap.put(lookupOp, filterDesc);
        }

        return filterDesc;
    }

    /**
     * Converts the filter's LookupTable into an array of corresponding SVG
     * table strings (one string of space-separated normalized values per
     * component).
     *
     * @throws SVGGraphics2DRuntimeException if the table does not have
     *         1, 3 or 4 components
     */
    private String[] convertLookupTables(LookupOp lookupOp) {
        LookupTable lookupTable = lookupOp.getTable();
        int nComponents = lookupTable.getNumComponents();

        if ((nComponents != 1) && (nComponents != 3) && (nComponents != 4))
            throw new SVGGraphics2DRuntimeException(ERR_ILLEGAL_BUFFERED_IMAGE_LOOKUP_OP);

        StringBuffer lookupTableBuf[] = new StringBuffer[nComponents];
        for (int i=0; i<nComponents; i++)
            lookupTableBuf[i] = new StringBuffer();

        if (!(lookupTable instanceof ByteLookupTable)) {
            int src[] = new int[nComponents];
            int dest[] = new int[nComponents];
            int offset = lookupTable.getOffset();

            // Offsets are used for constrained sources. Therefore,
            // the lookup values should never be used under offset.
            // There is no SVG equivalent for this behavior.
            // These values are mapped to identity.
            for (int i=0; i<offset; i++) {
                // Fill in string buffers
                for (int j=0; j<nComponents; j++) {
                    lookupTableBuf[j].append(doubleString(i/255.));
                    lookupTableBuf[j].append(SPACE);
                }
            }

            for (int i=offset; i<=255; i++) {
                // Fill in source array
                for (int j=0; j<nComponents; j++)
                    src[j] = i;

                // Get destination values
                lookupTable.lookupPixel(src, dest);

                // Fill in string buffers
                for (int j=0; j<nComponents; j++) {
                    lookupTableBuf[j].append(doubleString(dest[j]/255.));
                    lookupTableBuf[j].append(SPACE);
                }
            }
        } else {
            byte src[] = new byte[nComponents];
            byte dest[] = new byte[nComponents];
            int offset = lookupTable.getOffset();

            // Offsets are used for constrained sources. Therefore,
            // the lookup values should never be used under offset.
            // There is no SVG equivalent for this behavior.
            // These values are mapped to identity.
            for (int i=0; i<offset; i++) {
                // Fill in string buffers
                for (int j=0; j<nComponents; j++) {
                    lookupTableBuf[j].append(doubleString(i/255.));
                    lookupTableBuf[j].append(SPACE);
                }
            }

            // BUGFIX: start at offset, matching the non-byte branch above.
            // The previous loop started at 0, which (for offset > 0) emitted
            // offset identity entries PLUS 256 lookup entries — an oversized
            // table — and drove ByteLookupTable.lookupPixel with source values
            // below the table's offset.
            for (int i=offset; i<=255; i++) {
                // Fill in source array
                for (int j=0; j<nComponents; j++) {
                    src[j] = (byte)(0xff & i);
                }

                // Get destination values
                ((ByteLookupTable)lookupTable).lookupPixel(src, dest);

                // Fill in string buffers
                for (int j=0; j<nComponents; j++) {
                    lookupTableBuf[j].append(doubleString((0xff & dest[j])/255.));
                    lookupTableBuf[j].append(SPACE);
                }
            }
        }

        String lookupTables[] = new String[nComponents];
        for (int i=0; i<nComponents; i++)
            lookupTables[i] = lookupTableBuf[i].toString().trim();

        return lookupTables;
    }
}
/**
 * Copyright 2014 Cloudera Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kitesdk.data.spi.filesystem;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.hadoop.io.AvroSerialization;
import org.apache.avro.mapred.AvroKey;
import org.apache.avro.mapreduce.AvroJob;
import org.apache.avro.mapreduce.AvroKeyInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader;
import org.apache.hadoop.mapreduce.lib.input.CombineFileSplit;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.kitesdk.compat.DynMethods;
import org.kitesdk.compat.Hadoop;
import org.kitesdk.data.Format;
import org.kitesdk.data.Formats;
import org.kitesdk.data.spi.AbstractKeyRecordReaderWrapper;
import org.kitesdk.data.spi.AbstractRefinableView;
import org.kitesdk.data.spi.DataModelUtil;
import org.kitesdk.data.spi.FilteredRecordReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import parquet.avro.AvroParquetInputFormat;
import parquet.avro.AvroReadSupport;

/**
 * MapReduce {@code InputFormat} that reads the records of a
 * {@link FileSystemDataset} (or a constrained {@link FileSystemView} of one)
 * as keys, dispatching on the dataset's storage format
 * (Avro, Parquet, JSON, CSV, or a user-supplied InputFormat).
 *
 * <p>NOTE(review): this class uses reflective shims ({@code DynMethods},
 * {@code Hadoop.JobContext.getConfiguration.invoke}) to run on both
 * Hadoop 1 and Hadoop 2 — the call shapes here are deliberate and
 * version-sensitive.
 */
class FileSystemViewKeyInputFormat<E> extends InputFormat<E, Void> {

  private static final Logger LOG =
      LoggerFactory.getLogger(FileSystemViewKeyInputFormat.class);

  // Constant from AvroJob copied here so we can set it on the Configuration
  // given to this class.
  private static final String AVRO_SCHEMA_INPUT_KEY =
      "avro.schema.input.key";

  // this is required for 1.7.4 because setDataModelClass is not available
  // (resolves to a no-op when the method is missing from AvroSerialization)
  private static final DynMethods.StaticMethod setModel =
      new DynMethods.Builder("setDataModelClass")
          .impl(AvroSerialization.class, Configuration.class, Class.class)
          .defaultNoop()
          .buildStatic();

  // the backing dataset; always set
  private FileSystemDataset<E> dataset;
  // the constrained view, or null when reading the whole dataset
  private FileSystemView<E> view;

  /**
   * Creates an input format over an entire dataset; {@code view} stays null.
   */
  public FileSystemViewKeyInputFormat(FileSystemDataset<E> dataset,
      Configuration conf) {
    this.dataset = dataset;
    this.view = null;
    LOG.debug("Dataset: {}", dataset);
    Format format = dataset.getDescriptor().getFormat();
    setConfigProperties(conf, format, dataset.getSchema(), dataset.getType());
  }

  /**
   * Creates an input format over a view; the view's schema/type (not the
   * dataset's) are pushed into the Configuration.
   */
  public FileSystemViewKeyInputFormat(FileSystemView<E> view,
      Configuration conf) {
    this.dataset = (FileSystemDataset<E>) view.getDataset();
    this.view = view;
    LOG.debug("View: {}", view);
    Format format = dataset.getDescriptor().getFormat();
    setConfigProperties(conf, format, view.getSchema(), view.getType());
  }

  /**
   * Pushes format-specific read configuration (data model class, read schema)
   * into {@code conf} for the Avro and Parquet code paths.
   */
  private static void setConfigProperties(Configuration conf, Format format,
      Schema schema, Class<?> type) {
    GenericData model = DataModelUtil.getDataModelForType(type);
    if (Formats.AVRO.equals(format)) {
      setModel.invoke(conf, model.getClass());
      conf.set(AVRO_SCHEMA_INPUT_KEY, schema.toString());
    } else if (Formats.PARQUET.equals(format)) {
      // TODO: update to a version of Parquet with setAvroDataSupplier
      //AvroReadSupport.setAvroDataSupplier(conf,
      //    DataModelUtil.supplierClassFor(model));
      AvroReadSupport.setAvroReadSchema(conf, schema);
    }
  }

  /**
   * Computes splits by delegating to the format-appropriate InputFormat.
   * Returns an empty list when the dataset/view has no input paths.
   *
   * @throws UnsupportedOperationException for unrecognized formats
   */
  @Override
  @SuppressWarnings({"unchecked", "deprecation"})
  public List<InputSplit> getSplits(JobContext jobContext) throws IOException {
    // reflective accessor: works on both Hadoop 1 and Hadoop 2 JobContext
    Configuration conf = Hadoop.JobContext.getConfiguration.invoke(jobContext);
    Job job = new Job(conf);
    Format format = dataset.getDescriptor().getFormat();
    if (setInputPaths(jobContext, job)) {
      if (Formats.AVRO.equals(format)) {
        AvroJob.setInputKeySchema(job, dataset.getDescriptor().getSchema());
        AvroCombineInputFormat<E> delegate = new AvroCombineInputFormat<E>();
        return delegate.getSplits(jobContext);
      } else if (Formats.PARQUET.equals(format)) {
        AvroParquetCombineInputFormat delegate =
            new AvroParquetCombineInputFormat();
        return delegate.getSplits(jobContext);
      } else if (Formats.JSON.equals(format)) {
        return new JSONInputFormat().getSplits(jobContext);
      } else if (Formats.CSV.equals(format)) {
        // this generates an unchecked cast exception?
        return new CSVInputFormat().getSplits(jobContext);
      } else if (Formats.INPUTFORMAT.equals(format)) {
        return InputFormatUtil.newInputFormatInstance(dataset.getDescriptor())
            .getSplits(jobContext);
      } else {
        throw new UnsupportedOperationException(
            "Not a supported format: " + format);
      }
    } else {
      // no matching data files: no splits
      return ImmutableList.of();
    }
  }

  /**
   * Resolves the dataset/view file paths and sets them on {@code job}.
   *
   * @return false if there are no paths (caller should produce no splits)
   */
  @SuppressWarnings("unchecked")
  private boolean setInputPaths(JobContext jobContext, Job job)
      throws IOException {
    List<Path> paths = Lists.newArrayList((Iterator) (view == null ?
        dataset.pathIterator() : view.pathIterator()));
    LOG.debug("Input paths: {}", paths);
    if (paths.isEmpty()) {
      return false;
    }
    FileInputFormat.setInputPaths(job, paths.toArray(new Path[paths.size()]));
    // the following line is needed for Hadoop 1, otherwise the paths are not set
    Configuration contextConf = Hadoop.JobContext
        .getConfiguration.invoke(jobContext);
    Configuration jobConf = Hadoop.JobContext
        .getConfiguration.invoke(job);
    contextConf.set("mapred.input.dir", jobConf.get("mapred.input.dir"));
    return true;
  }

  /**
   * Creates a record reader, wrapping it with a {@link FilteredRecordReader}
   * when a constrained view is being read so out-of-view entities are skipped.
   */
  @Override
  public RecordReader<E, Void> createRecordReader(InputSplit inputSplit,
      TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException {
    RecordReader<E, Void> unfilteredRecordReader = createUnfilteredRecordReader
        (inputSplit, taskAttemptContext);
    if (view != null) {
      // use the constraints to filter out entities from the reader
      return new FilteredRecordReader<E>(unfilteredRecordReader,
          ((AbstractRefinableView) view).getConstraints(), view.getAccessor());
    }
    return unfilteredRecordReader;
  }

  /**
   * Builds the format-specific delegate reader (no view filtering applied).
   *
   * @throws UnsupportedOperationException for unrecognized formats
   */
  @SuppressWarnings("unchecked")
  private RecordReader<E, Void> createUnfilteredRecordReader(InputSplit inputSplit,
      TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException {
    Format format = dataset.getDescriptor().getFormat();
    if (Formats.AVRO.equals(format)) {
      return new AvroKeyReaderWrapper(new AvroCombineInputFormat<E>());
    } else if (Formats.PARQUET.equals(format)) {
      return new ValueReaderWrapper(new AvroParquetCombineInputFormat());
    } else if (Formats.JSON.equals(format)) {
      JSONInputFormat<E> delegate = new JSONInputFormat<E>();
      delegate.setView(view != null ? view : dataset);
      return delegate.createRecordReader(inputSplit, taskAttemptContext);
    } else if (Formats.CSV.equals(format)) {
      CSVInputFormat<E> delegate = new CSVInputFormat<E>();
      delegate.setView(view != null ? view : dataset);
      return delegate.createRecordReader(inputSplit, taskAttemptContext);
    } else if (Formats.INPUTFORMAT.equals(format)) {
      return InputFormatUtil.newRecordReader(dataset.getDescriptor());
    } else {
      throw new UnsupportedOperationException(
          "Not a supported format: " + format);
    }
  }

  /**
   * Unwraps {@code AvroKey<E>} keys to their datum {@code E}.
   */
  private static class AvroKeyReaderWrapper<E> extends
      AbstractKeyRecordReaderWrapper<E, AvroKey<E>, NullWritable> {
    public AvroKeyReaderWrapper(AvroCombineInputFormat<E> inputFormat) {
      super(inputFormat);
    }

    @Override
    public E getCurrentKey() throws IOException, InterruptedException {
      return delegate.getCurrentKey().datum();
    }
  }

  /**
   * Per-file reader used by {@link AvroCombineInputFormat}; delegates to
   * the stock {@link AvroKeyInputFormat}.
   */
  private static class AvroCombineFileRecordReader<E> extends
      AbstractCombineFileRecordReader<AvroKey<E>, NullWritable> {
    public AvroCombineFileRecordReader(CombineFileSplit split,
        TaskAttemptContext context, Integer idx) {
      super(split, context, idx);
    }

    @Override
    FileInputFormat<AvroKey<E>, NullWritable> getInputFormat() {
      return new AvroKeyInputFormat<E>();
    }
  }

  /**
   * Combines multiple small Avro files into a single input split.
   */
  private static class AvroCombineInputFormat<E> extends
      AbstractKiteCombineFileInputFormat<AvroKey<E>, NullWritable> {
    @Override
    Class<? extends AbstractCombineFileRecordReader> getRecordReaderClass() {
      return AvroCombineFileRecordReader.class;
    }
  }

  /**
   * Per-file reader used by {@link AvroParquetCombineInputFormat}; delegates
   * to {@link AvroParquetInputFormat}.
   */
  private static class AvroParquetCombineFileRecordReader<E> extends
      AbstractCombineFileRecordReader<Void, E> {
    public AvroParquetCombineFileRecordReader(CombineFileSplit split,
        TaskAttemptContext context, Integer idx) {
      super(split, context, idx);
    }

    @Override
    FileInputFormat<Void, E> getInputFormat() {
      return new AvroParquetInputFormat<E>();
    }
  }

  /**
   * Combines multiple small Parquet files into a single input split.
   */
  private static class AvroParquetCombineInputFormat<E> extends
      AbstractKiteCombineFileInputFormat<Void, E> {
    @Override
    Class<? extends AbstractCombineFileRecordReader> getRecordReaderClass() {
      return AvroParquetCombineFileRecordReader.class;
    }
  }
}
/*
 * Copyright 2011 frdfsnlght <frdfsnlght@gmail.com>.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.frdfsnlght.transporter.api;

import java.util.Map;
import java.util.Set;
import org.bukkit.GameMode;
import org.bukkit.block.BlockFace;

/**
 * Represents a local gate on the local server.
 * <p>
 * The interface falls into two halves: access-control collections (PINs,
 * banned/allowed/replace items and potion effects) and a large set of named
 * "options" exposed as conventional getter/setter pairs. Option accessors only
 * read and write configuration values; they perform no I/O themselves.
 *
 * @author frdfsnlght <frdfsnlght@gmail.com>
 */
public interface LocalGate extends Gate {

    /**
     * Saves the gate's configuration to disk.
     *
     * @param force true to force the save even if no changes have been made
     */
    public void save(boolean force);

    /**
     * Rebuilds the block structure of the gate.
     */
    public void rebuild();

    /**
     * Adds a PIN to the gate's list of allowed PINs.
     *
     * @param pin the PIN to add
     * @return true if the PIN was added, false if it already exists
     * @throws GateException if the PIN is invalid
     */
    public boolean addPin(String pin) throws GateException;

    /**
     * Removes a PIN from the gate's list of allowed PINs.
     *
     * @param pin the PIN to remove
     * @return true if the PIN was removed, false if it doesn't exist
     */
    public boolean removePin(String pin);

    /**
     * Removes all PINs from the gate's list of allowed PINs.
     */
    public void removeAllPins();

    /**
     * Returns true if the gate's list of allowed PINs contains the specified PIN.
     *
     * @param pin the pin
     * @return true if the PIN is in the gate's list of allowed PINs
     */
    public boolean hasPin(String pin);

    /**
     * Returns the set of items that are in the gate's banned item list.
     *
     * @return a set of item strings
     */
    public Set<String> getBannedItems();

    /**
     * Adds an item to the gate's banned item list.
     *
     * @param item the item to add
     * @return true if the item was added, false if it already exists
     * @throws GateException if the item is invalid
     */
    public boolean addBannedItem(String item) throws GateException;

    /**
     * Removes an item from the gate's banned item list.
     *
     * @param item the item to remove
     * @return true if the item was removed, false if it doesn't exist
     * @throws GateException if the item is invalid
     */
    public boolean removeBannedItem(String item) throws GateException;

    /**
     * Removes all the items from the gate's banned item list.
     */
    public void removeAllBannedItems();

    /**
     * Returns the set of items that are in the gate's allowed item list.
     *
     * @return a set of item strings
     */
    public Set<String> getAllowedItems();

    /**
     * Adds an item to the gate's allowed item list.
     *
     * @param item the item to add
     * @return true if the item was added, false if it already exists
     * @throws GateException if the item is invalid
     */
    public boolean addAllowedItem(String item) throws GateException;

    /**
     * Removes an item from the gate's allowed item list.
     *
     * @param item the item to remove
     * @return true if the item was removed, false if it doesn't exist
     * @throws GateException if the item is invalid
     */
    public boolean removeAllowedItem(String item) throws GateException;

    /**
     * Removes all the items from the gate's allowed item list.
     */
    public void removeAllAllowedItems();

    /**
     * Returns the map of items that are in the gate's replace item map.
     *
     * @return a map of item strings
     */
    public Map<String,String> getReplaceItems();

    /**
     * Adds an item mapping to the gate's replace item map.
     *
     * @param fromItem the item to replace
     * @param toItem the item to substitute
     * @return true if the item was added, false if it already exists
     * @throws GateException if either item is invalid
     */
    public boolean addReplaceItem(String fromItem, String toItem) throws GateException;

    /**
     * Removes an item from the gate's replace item map.
     *
     * @param fromItem the item to remove
     * @return true if the item was removed, false if it doesn't exist
     * @throws GateException if the item is invalid
     */
    public boolean removeReplaceItem(String fromItem) throws GateException;

    /**
     * Removes all the items from the gate's replace item map.
     */
    public void removeAllReplaceItems();

    /**
     * Returns the set of potion effects that are in the gate's banned potion effect list.
     *
     * @return a set of potion effect strings
     */
    public Set<String> getBannedPotions();

    /**
     * Adds a potion effect to the gate's banned potion effect list.
     *
     * @param potion the potion effect to add
     * @return true if the potion effect was added, false if it already exists
     * @throws GateException if the potion effect is invalid
     */
    public boolean addBannedPotion(String potion) throws GateException;

    /**
     * Removes a potion effect from the gate's banned potion effect list.
     *
     * @param potion the potion effect to remove
     * @return true if the potion effect was removed, false if it doesn't exist
     * @throws GateException if the potion effect is invalid
     */
    public boolean removeBannedPotion(String potion) throws GateException;

    /**
     * Removes all the potion effects from the gate's banned potion effect list.
     */
    public void removeAllBannedPotions();

    /**
     * Returns the set of potion effects that are in the gate's allowed potion effect list.
     *
     * @return a set of potion effect strings
     */
    public Set<String> getAllowedPotions();

    /**
     * Adds a potion effect to the gate's allowed potion effect list.
     *
     * @param potion the potion effect to add
     * @return true if the potion effect was added, false if it already exists
     * @throws GateException if the potion effect is invalid
     */
    public boolean addAllowedPotion(String potion) throws GateException;

    /**
     * Removes a potion effect from the gate's allowed potion effect list.
     *
     * @param potion the potion effect to remove
     * @return true if the potion effect was removed, false if it doesn't exist
     * @throws GateException if the potion effect is invalid
     */
    public boolean removeAllowedPotion(String potion) throws GateException;

    /**
     * Removes all the potion effects from the gate's allowed potion effect list.
     */
    public void removeAllAllowedPotions();

    /**
     * Returns the map of potion effects that are in the gate's replace potion effect map.
     *
     * @return a map of potion effect strings
     */
    public Map<String,String> getReplacePotions();

    /**
     * Adds a potion effect mapping to the gate's replace potion effect map.
     *
     * @param fromPotion the potion effect to replace
     * @param toPotion the potion effect to substitute
     * @return true if the potion effect was added, false if it already exists
     * @throws GateException if either potion effect is invalid
     */
    public boolean addReplacePotion(String fromPotion, String toPotion) throws GateException;

    /**
     * Removes a potion effect from the gate's replace potion effect map.
     *
     * @param fromPotion the potion effect to remove
     * @return true if the potion effect was removed, false if it doesn't exist
     * @throws GateException if the potion effect is invalid
     */
    public boolean removeReplacePotion(String fromPotion) throws GateException;

    /**
     * Removes all the potion effects from the gate's replace potion effect map.
     */
    public void removeAllReplacePotions();

    /* Options */

    /**
     * Returns the value of the "duration" option.
     * @return the option value
     */
    public int getDuration();

    /**
     * Sets the value of the "duration" option.
     * @param i the option value
     */
    public void setDuration(int i);

    /**
     * Returns the value of the "direction" option.
     * @return the option value
     */
    public BlockFace getDirection();

    /**
     * Sets the value of the "direction" option.
     * @param dir the option value
     */
    public void setDirection(BlockFace dir);

    /**
     * Returns the value of the "linkLocal" option.
     * @return the option value
     */
    public boolean getLinkLocal();

    /**
     * Sets the value of the "linkLocal" option.
     * @param b the option value
     */
    public void setLinkLocal(boolean b);

    /**
     * Returns the value of the "linkWorld" option.
     * @return the option value
     */
    public boolean getLinkWorld();

    /**
     * Sets the value of the "linkWorld" option.
     * @param b the option value
     */
    public void setLinkWorld(boolean b);

    /**
     * Returns the value of the "linkServer" option.
     * @return the option value
     */
    public boolean getLinkServer();

    /**
     * Sets the value of the "linkServer" option.
     * @param b the option value
     */
    public void setLinkServer(boolean b);

    /**
     * Returns the value of the "linkNoneFormat" option.
     * @return the option value
     */
    public String getLinkNoneFormat();

    /**
     * Sets the value of the "linkNoneFormat" option.
     * @param s the option value
     */
    public void setLinkNoneFormat(String s);

    /**
     * Returns the value of the "linkUnselectedFormat" option.
     * @return the option value
     */
    public String getLinkUnselectedFormat();

    /**
     * Sets the value of the "linkUnselectedFormat" option.
     * @param s the option value
     */
    public void setLinkUnselectedFormat(String s);

    /**
     * Returns the value of the "linkOfflineFormat" option.
     * @return the option value
     */
    public String getLinkOfflineFormat();

    /**
     * Sets the value of the "linkOfflineFormat" option.
     * @param s the option value
     */
    public void setLinkOfflineFormat(String s);

    /**
     * Returns the value of the "linkLocalFormat" option.
     * @return the option value
     */
    public String getLinkLocalFormat();

    /**
     * Sets the value of the "linkLocalFormat" option.
     * @param s the option value
     */
    public void setLinkLocalFormat(String s);

    /**
     * Returns the value of the "linkWorldFormat" option.
     * @return the option value
     */
    public String getLinkWorldFormat();

    /**
     * Sets the value of the "linkWorldFormat" option.
     * @param s the option value
     */
    public void setLinkWorldFormat(String s);

    /**
     * Returns the value of the "linkServerFormat" option.
     * @return the option value
     */
    public String getLinkServerFormat();

    /**
     * Sets the value of the "linkServerFormat" option.
     * @param s the option value
     */
    public void setLinkServerFormat(String s);

    /**
     * Returns the value of the "multiLink" option.
     * @return the option value
     */
    public boolean getMultiLink();

    /**
     * Sets the value of the "multiLink" option.
     * @param b the option value
     */
    public void setMultiLink(boolean b);

    /**
     * Returns the value of the "protect" option.
     * @return the option value
     */
    public boolean getProtect();

    /**
     * Sets the value of the "protect" option.
     * @param b the option value
     */
    public void setProtect(boolean b);

    /**
     * Returns the value of the "requirePin" option.
     * @return the option value
     */
    public boolean getRequirePin();

    /**
     * Sets the value of the "requirePin" option.
     * @param b the option value
     */
    public void setRequirePin(boolean b);

    /**
     * Returns the value of the "requireValidPin" option.
     * @return the option value
     */
    public boolean getRequireValidPin();

    /**
     * Sets the value of the "requireValidPin" option.
     * @param b the option value
     */
    public void setRequireValidPin(boolean b);

    /**
     * Returns the value of the "requireLevel" option.
     * @return the option value
     */
    public int getRequireLevel();

    /**
     * Sets the value of the "requireLevel" option.
     * @param i the option value
     */
    public void setRequireLevel(int i);

    /**
     * Returns the value of the "invalidPinDamage" option.
     * @return the option value
     */
    public int getInvalidPinDamage();

    /**
     * Sets the value of the "invalidPinDamage" option.
     * @param i the option value
     */
    public void setInvalidPinDamage(int i);

    /**
     * Returns the value of the "sendChat" option.
     * @return the option value
     */
    public boolean getSendChat();

    /**
     * Sets the value of the "sendChat" option.
     * @param b the option value
     */
    public void setSendChat(boolean b);

    /**
     * Returns the value of the "sendChatFilter" option.
     * @return the option value
     */
    public String getSendChatFilter();

    /**
     * Sets the value of the "sendChatFilter" option.
     * @param s the option value
     */
    public void setSendChatFilter(String s);

    /**
     * Returns the value of the "sendChatFormatFilter" option.
     * @return the option value
     */
    public String getSendChatFormatFilter();

    /**
     * Sets the value of the "sendChatFormatFilter" option.
     * @param s the option value
     */
    public void setSendChatFormatFilter(String s);

    /**
     * Returns the value of the "sendChatDistance" option.
     * @return the option value
     */
    public int getSendChatDistance();

    /**
     * Sets the value of the "sendChatDistance" option.
     * @param i the option value
     */
    public void setSendChatDistance(int i);

    /**
     * Returns the value of the "receiveChat" option.
     * @return the option value
     */
    public boolean getReceiveChat();

    /**
     * Sets the value of the "receiveChat" option.
     * @param b the option value
     */
    public void setReceiveChat(boolean b);

    /**
     * Returns the value of the "receiveChatFilter" option.
     * @return the option value
     */
    public String getReceiveChatFilter();

    /**
     * Sets the value of the "receiveChatFilter" option.
     * @param s the option value
     */
    public void setReceiveChatFilter(String s);

    /**
     * Returns the value of the "receiveChatDistance" option.
     * @return the option value
     */
    public int getReceiveChatDistance();

    /**
     * Sets the value of the "receiveChatDistance" option.
     * @param i the option value
     */
    public void setReceiveChatDistance(int i);

    /**
     * Returns the value of the "requireAllowedItems" option.
     * @return the option value
     */
    public boolean getRequireAllowedItems();

    /**
     * Sets the value of the "requireAllowedItems" option.
     * @param b the option value
     */
    public void setRequireAllowedItems(boolean b);

    /**
     * Returns the value of the "receiveInventory" option.
     * @return the option value
     */
    public boolean getReceiveInventory();

    /**
     * Sets the value of the "receiveInventory" option.
     * @param b the option value
     */
    public void setReceiveInventory(boolean b);

    /**
     * Returns the value of the "deleteInventory" option.
     * @return the option value
     */
    public boolean getDeleteInventory();

    /**
     * Sets the value of the "deleteInventory" option.
     * @param b the option value
     */
    public void setDeleteInventory(boolean b);

    /**
     * Returns the value of the "receiveGameMode" option.
     * @return the option value
     */
    public boolean getReceiveGameMode();

    /**
     * Sets the value of the "receiveGameMode" option.
     * @param b the option value
     */
    public void setReceiveGameMode(boolean b);

    /**
     * Returns the value of the "allowGameModes" option.
     * @return the option value
     */
    public String getAllowGameModes();

    /**
     * Sets the value of the "allowGameModes" option.
     * @param s the option value
     */
    public void setAllowGameModes(String s);

    /**
     * Returns the value of the "gameMode" option.
     * @return the option value
     */
    public GameMode getGameMode();

    /**
     * Sets the value of the "gameMode" option.
     * @param m the option value
     */
    public void setGameMode(GameMode m);

    /**
     * Returns the value of the "receiveXP" option.
     * @return the option value
     */
    public boolean getReceiveXP();

    /**
     * Sets the value of the "receiveXP" option.
     * @param b the option value
     */
    public void setReceiveXP(boolean b);

    /**
     * Returns the value of the "receivePotions" option.
     * @return the option value
     */
    public boolean getReceivePotions();

    /**
     * Sets the value of the "receivePotions" option.
     * @param b the option value
     */
    public void setReceivePotions(boolean b);

    /**
     * Returns the value of the "requireAllowedPotions" option.
     * @return the option value
     */
    public boolean getRequireAllowedPotions();

    /**
     * Sets the value of the "requireAllowedPotions" option.
     * @param b the option value
     */
    public void setRequireAllowedPotions(boolean b);

    /**
     * Returns the value of the "receiveStats" option.
     * @return the option value
     */
    public boolean getReceiveStats();

    /**
     * Sets the value of the "receiveStats" option.
     * @param b the option value
     */
    public void setReceiveStats(boolean b);

    /**
     * Returns the value of the "randomNextLink" option.
     * @return the option value
     */
    public boolean getRandomNextLink();

    /**
     * Sets the value of the "randomNextLink" option.
     * @param b the option value
     */
    public void setRandomNextLink(boolean b);

    /**
     * Returns the value of the "sendNextLink" option.
     * @return the option value
     */
    public boolean getSendNextLink();

    /**
     * Sets the value of the "sendNextLink" option.
     * @param b the option value
     */
    public void setSendNextLink(boolean b);

    /**
     * Returns the value of the "teleportFormat" option.
     * @return the option value
     */
    public String getTeleportFormat();

    /**
     * Sets the value of the "teleportFormat" option.
     * @param s the option value
     */
    public void setTeleportFormat(String s);

    /**
     * Returns the value of the "noLinksFormat" option.
     * @return the option value
     */
    public String getNoLinksFormat();

    /**
     * Sets the value of the "noLinksFormat" option.
     * @param s the option value
     */
    public void setNoLinksFormat(String s);

    /**
     * Returns the value of the "noLinkSelectedFormat" option.
     * @return the option value
     */
    public String getNoLinkSelectedFormat();

    /**
     * Sets the value of the "noLinkSelectedFormat" option.
     * @param s the option value
     */
    public void setNoLinkSelectedFormat(String s);

    /**
     * Returns the value of the "invalidLinkFormat" option.
     * @return the option value
     */
    public String getInvalidLinkFormat();

    /**
     * Sets the value of the "invalidLinkFormat" option.
     * @param s the option value
     */
    public void setInvalidLinkFormat(String s);

    /**
     * Returns the value of the "unknownLinkFormat" option.
     * @return the option value
     */
    public String getUnknownLinkFormat();

    /**
     * Sets the value of the "unknownLinkFormat" option.
     * @param s the option value
     */
    public void setUnknownLinkFormat(String s);

    /**
     * Returns the value of the "markerFormat" option.
     * @return the option value
     */
    public String getMarkerFormat();

    /**
     * Sets the value of the "markerFormat" option.
     * @param s the option value
     */
    public void setMarkerFormat(String s);

    /**
     * Returns the value of the "countdown" option.
     * @return the option value
     */
    public int getCountdown();

    /**
     * Sets the value of the "countdown" option.
     * @param i the option value
     */
    public void setCountdown(int i);

    /**
     * Returns the value of the "countdownInterval" option.
     * @return the option value
     */
    public int getCountdownInterval();

    /**
     * Sets the value of the "countdownInterval" option.
     * @param i the option value
     */
    public void setCountdownInterval(int i);

    /**
     * Returns the value of the "countdownFormat" option.
     * @return the option value
     */
    public String getCountdownFormat();

    /**
     * Sets the value of the "countdownFormat" option.
     * @param s the option value
     */
    public void setCountdownFormat(String s);

    /**
     * Returns the value of the "countdownIntervalFormat" option.
     * @return the option value
     */
    public String getCountdownIntervalFormat();

    /**
     * Sets the value of the "countdownIntervalFormat" option.
     * @param s the option value
     */
    public void setCountdownIntervalFormat(String s);

    /**
     * Returns the value of the "countdownCancelFormat" option.
     * @return the option value
     */
    public String getCountdownCancelFormat();

    /**
     * Sets the value of the "countdownCancelFormat" option.
     * @param s the option value
     */
    public void setCountdownCancelFormat(String s);

    /**
     * Returns the value of the "linkLocalCost" option.
     * @return the option value
     */
    public double getLinkLocalCost();

    /**
     * Sets the value of the "linkLocalCost" option.
     * @param cost the option value
     */
    public void setLinkLocalCost(double cost);

    /**
     * Returns the value of the "linkWorldCost" option.
     * @return the option value
     */
    public double getLinkWorldCost();

    /**
     * Sets the value of the "linkWorldCost" option.
     * @param cost the option value
     */
    public void setLinkWorldCost(double cost);

    /**
     * Returns the value of the "linkServerCost" option.
     * @return the option value
     */
    public double getLinkServerCost();

    /**
     * Sets the value of the "linkServerCost" option.
     * @param cost the option value
     */
    public void setLinkServerCost(double cost);

    /**
     * Returns the value of the "sendLocalCost" option.
     * @return the option value
     */
    public double getSendLocalCost();

    /**
     * Sets the value of the "sendLocalCost" option.
     * @param cost the option value
     */
    public void setSendLocalCost(double cost);

    /**
     * Returns the value of the "sendWorldCost" option.
     * @return the option value
     */
    public double getSendWorldCost();

    /**
     * Sets the value of the "sendWorldCost" option.
     * @param cost the option value
     */
    public void setSendWorldCost(double cost);

    /**
     * Returns the value of the "sendServerCost" option.
     * @return the option value
     */
    public double getSendServerCost();

    /**
     * Sets the value of the "sendServerCost" option.
     * @param cost the option value
     */
    public void setSendServerCost(double cost);

    /**
     * Returns the value of the "receiveLocalCost" option.
     * @return the option value
     */
    public double getReceiveLocalCost();

    /**
     * Sets the value of the "receiveLocalCost" option.
     * @param cost the option value
     */
    public void setReceiveLocalCost(double cost);

    /**
     * Returns the value of the "receiveWorldCost" option.
     * @return the option value
     */
    public double getReceiveWorldCost();

    /**
     * Sets the value of the "receiveWorldCost" option.
     * @param cost the option value
     */
    public void setReceiveWorldCost(double cost);

    /**
     * Returns the value of the "receiveServerCost" option.
     * @return the option value
     */
    public double getReceiveServerCost();

    /**
     * Sets the value of the "receiveServerCost" option.
     * @param cost the option value
     */
    public void setReceiveServerCost(double cost);

    /**
     * Returns the value of the "linkAddDistance" option.
     * @return the option value
     */
    public int getLinkAddDistance();

    /**
     * Sets the value of the "linkAddDistance" option.
     * @param i the option value
     */
    public void setLinkAddDistance(int i);

    /**
     * Returns the value of the "hidden" option.
     * @return the option value
     */
    @Override
    public boolean getHidden();

    /**
     * Sets the value of the "hidden" option.
     * @param b the option value
     */
    public void setHidden(boolean b);

    /* End Options */

}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.googlecode.jtype; import java.io.Serializable; import java.lang.reflect.GenericArrayType; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.TypeVariable; import java.lang.reflect.WildcardType; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static com.googlecode.jtype.Utils.checkFalse; import static com.googlecode.jtype.Utils.checkNotNull; import static com.googlecode.jtype.Utils.checkTrue; /** * Provides a generic type literal. * <p> * This class captures the actual type argument used when subclassed. This allows it to be referenced as a type * parameter at compile time and also makes it available at run time. It is intended to be used as follows: * <p> * {@code Generic<List<String>> listStringType = new Generic<List<String>>() }<code>{};</code> * <p> * This allows generic type literals to be used in a simple manner as standard class literals. For example, consider the * following generic method signature: * <p> * {@code <T> void add(T element, Class<T> type)} * <p> * A problem arises when {@code <T>} is a generic type, such as {@code List<String>}, since {@code List<String>.class} * produces a compile time error. 
Use of this class can mitigate this problem: * <p> * {@code <T> void add(T element, Generic<T> type)} * <p> * Which can then be invoked as follows: * <p> * {@code add(new ArrayList<String>(), new Generic<List<String>>() }<code>{});</code> * * @author Mark Hobson * @param <T> the type that this generic type literal represents * @see Generics * @see <a href="http://gafter.blogspot.com/2006/12/super-type-tokens.html">Neal Gafter's blog: Super Type Tokens</a> */ public abstract class Generic<T> implements Serializable { // classes ---------------------------------------------------------------- private static final class DefaultGeneric<T> extends Generic<T> { public DefaultGeneric(Type type) { super(type); } } /** * Simple read-only cache for common generics. Implemented as an inner class for lazy instantiation. */ private static class GenericCache { private static final Map<Type, Generic<?>> GENERICS_BY_TYPE = createCache(); public static Generic<?> get(Type type) { return GENERICS_BY_TYPE.get(type); } } // constants -------------------------------------------------------------- private static final long serialVersionUID = 1L; // fields ----------------------------------------------------------------- /** * The type that this generic type literal represents. * * @serial */ private final Type type; // constructors ----------------------------------------------------------- protected Generic() { Type type = getActualTypeArgument(); validateType(type); this.type = type; } Generic(Type type) { validateType(type); this.type = type; } // public methods --------------------------------------------------------- public Type getType() { return type; } @SuppressWarnings("unchecked") public Class<? super T> getRawType() { return (Class<? 
super T>) TypeUtils.getErasedReferenceType(type); } public String toUnqualifiedString() { return TypeUtils.toUnqualifiedString(type); } public static <T> Generic<T> get(Class<T> klass) { // guaranteed by definition @SuppressWarnings("unchecked") Generic<T> generic = (Generic<T>) get((Type) klass); return generic; } public static Generic<?> get(Type type) { Generic<?> generic = GenericCache.get(type); if (generic == null) { generic = create(type); } return generic; } @SuppressWarnings("unchecked") public static <T> Generic<? extends T> get(Class<T> rawType, Type... actualTypeArguments) { if (actualTypeArguments == null || actualTypeArguments.length == 0) { return get(rawType); } ParameterizedType paramType = Types.parameterizedType(rawType, actualTypeArguments); return (Generic<? extends T>) get(paramType); } public static Generic<?> valueOf(String typeName) { return get(Types.valueOf(typeName)); } // Object methods --------------------------------------------------------- /** * {@inheritDoc} */ @Override public int hashCode() { return type.hashCode(); } /** * {@inheritDoc} */ @Override public boolean equals(Object object) { if (!(object instanceof Generic<?>)) { return false; } Generic<?> generic = (Generic<?>) object; return type.equals(generic.getType()); } /** * {@inheritDoc} */ @Override public String toString() { return TypeUtils.toString(type); } // private methods -------------------------------------------------------- private static Map<Type, Generic<?>> createCache() { Map<Type, Generic<?>> genericsByType = new HashMap<Type, Generic<?>>(); putCacheEntry(genericsByType, Object.class); putCacheEntry(genericsByType, Boolean.class); putCacheEntry(genericsByType, Byte.class); putCacheEntry(genericsByType, Character.class); putCacheEntry(genericsByType, Double.class); putCacheEntry(genericsByType, Float.class); putCacheEntry(genericsByType, Integer.class); putCacheEntry(genericsByType, Long.class); putCacheEntry(genericsByType, Short.class); 
putCacheEntry(genericsByType, String.class); return Collections.unmodifiableMap(genericsByType); } private static void putCacheEntry(Map<Type, Generic<?>> genericsByType, Type type) { genericsByType.put(type, create(type)); } private static Generic<Object> create(Type type) { return new DefaultGeneric<Object>(type); } private static void validateType(Type type) { checkNotNull(type, "type"); checkFalse(type instanceof TypeVariable<?>, "Type variables are not supported: ", type); checkFalse(type instanceof WildcardType, "Wildcard types are not supported: ", type); checkTrue(type instanceof Class<?> || type instanceof ParameterizedType || type instanceof GenericArrayType, "Unsupported type: ", type); } private Type getActualTypeArgument() { if (getClass().getSuperclass() != Generic.class) { throw new IllegalStateException("Generic must only be subclassed once"); } Type superclass = getClass().getGenericSuperclass(); return ((ParameterizedType) superclass).getActualTypeArguments()[0]; } }
/**
 * Generated with Acceleo
 */
package org.wso2.developerstudio.eclipse.gmf.esb.parts.impl;

// Start of user code for imports
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.context.impl.EObjectPropertiesEditionContext;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart;
import org.eclipse.emf.eef.runtime.policies.PropertiesEditingPolicy;
import org.eclipse.emf.eef.runtime.providers.PropertiesEditingProvider;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable;
import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener;
import org.eclipse.emf.eef.runtime.ui.widgets.TabElementTreeSelectionDialog;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider;
import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Group;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code

/**
 * SWT properties edition part for the "API Resource Input Connector" ESB model
 * element. Exposes a single advanced-references table ("incomingLinks") and
 * translates the user's table interactions (add / edit / move / remove /
 * selection) into EEF {@link PropertiesEditionEvent}s fired at the owning
 * {@link IPropertiesEditionComponent}.
 *
 * NOTE: generated with Acceleo — avoid hand-editing outside the
 * "Start/End of user code" markers; regeneration will clobber other changes.
 */
public class APIResourceInputConnectorPropertiesEditionPartImpl extends CompositePropertiesEditionPart implements ISWTPropertiesEditionPart, APIResourceInputConnectorPropertiesEditionPart {

    // Table widget listing the incoming links of the edited element.
    protected ReferencesTable incomingLinks;
    // Business filters restrict which model elements are *selectable* in the add dialog.
    protected List<ViewerFilter> incomingLinksBusinessFilters = new ArrayList<ViewerFilter>();
    // Viewer filters restrict which elements are *shown* in the add dialog.
    protected List<ViewerFilter> incomingLinksFilters = new ArrayList<ViewerFilter>();

    /**
     * Default constructor
     * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
     *
     */
    public APIResourceInputConnectorPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) {
        super(editionComponent);
    }

    /**
     * Creates the root composite (3-column grid) and populates it via
     * {@link #createControls(Composite)}.
     *
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createFigure(org.eclipse.swt.widgets.Composite)
     *
     */
    public Composite createFigure(final Composite parent) {
        view = new Composite(parent, SWT.NONE);
        GridLayout layout = new GridLayout();
        layout.numColumns = 3;
        view.setLayout(layout);
        createControls(view);
        return view;
    }

    /**
     * Declares the binding/composition sequence for this part (a Properties
     * group containing the incomingLinks table) and lets a {@link PartComposer}
     * walk it, dispatching each step key to the matching create* method.
     *
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
     *      createControls(org.eclipse.swt.widgets.Composite)
     *
     */
    public void createControls(Composite view) {
        CompositionSequence aPIResourceInputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent);
        aPIResourceInputConnectorStep
            .addStep(EsbViewsRepository.APIResourceInputConnector.Properties.class)
            .addStep(EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks);

        composer = new PartComposer(aPIResourceInputConnectorStep) {

            @Override
            public Composite addToPart(Composite parent, Object key) {
                if (key == EsbViewsRepository.APIResourceInputConnector.Properties.class) {
                    return createPropertiesGroup(parent);
                }
                if (key == EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks) {
                    return createIncomingLinksAdvancedReferencesTable(parent);
                }
                return parent;
            }
        };
        composer.compose(view);
    }

    /**
     * Creates the titled "Properties" group that hosts the part's widgets
     * (3-column grid, spanning the parent's full width).
     */
    protected Composite createPropertiesGroup(Composite parent) {
        Group propertiesGroup = new Group(parent, SWT.NONE);
        propertiesGroup.setText(EsbMessages.APIResourceInputConnectorPropertiesEditionPart_PropertiesGroupLabel);
        GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL);
        propertiesGroupData.horizontalSpan = 3;
        propertiesGroup.setLayoutData(propertiesGroupData);
        GridLayout propertiesGroupLayout = new GridLayout();
        propertiesGroupLayout.numColumns = 3;
        propertiesGroup.setLayout(propertiesGroupLayout);
        return propertiesGroup;
    }

    /**
     * Builds the incomingLinks advanced-references table: wires table actions
     * to the add/edit/move/remove handlers below, forwards row selection as a
     * SELECTION_CHANGED event, and disables manual row reordering.
     */
    protected Composite createIncomingLinksAdvancedReferencesTable(Composite parent) {
        String label = getDescription(EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, EsbMessages.APIResourceInputConnectorPropertiesEditionPart_IncomingLinksLabel);
        this.incomingLinks = new ReferencesTable(label, new ReferencesTableListener() {
            public void handleAdd() {
                addIncomingLinks();
            }
            public void handleEdit(EObject element) {
                editIncomingLinks(element);
            }
            public void handleMove(EObject element, int oldIndex, int newIndex) {
                moveIncomingLinks(element, oldIndex, newIndex);
            }
            public void handleRemove(EObject element) {
                removeFromIncomingLinks(element);
            }
            public void navigateTo(EObject element) {
                // Navigation is intentionally a no-op for this part.
            }
        });
        this.incomingLinks.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, EsbViewsRepository.SWT_KIND));
        this.incomingLinks.createControls(parent);
        this.incomingLinks.addSelectionListener(new SelectionAdapter() {

            public void widgetSelected(SelectionEvent e) {
                // Only forward selections that carry a model element.
                if (e.item != null && e.item.getData() instanceof EObject) {
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(APIResourceInputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
                }
            }

        });
        GridData incomingLinksData = new GridData(GridData.FILL_HORIZONTAL);
        incomingLinksData.horizontalSpan = 3;
        this.incomingLinks.setLayoutData(incomingLinksData);
        this.incomingLinks.disableMove();
        incomingLinks.setID(EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks);
        incomingLinks.setEEFType("eef::AdvancedReferencesTable"); //$NON-NLS-1$
        return parent;
    }

    /**
     * Opens the element-selection dialog and fires a COMMIT/ADD event for each
     * element the user picks, then refreshes the table.
     */
    protected void addIncomingLinks() {
        TabElementTreeSelectionDialog dialog = new TabElementTreeSelectionDialog(incomingLinks.getInput(), incomingLinksFilters,
            incomingLinksBusinessFilters,
            "incomingLinks", propertiesEditionComponent.getEditingContext().getAdapterFactory(), current.eResource()) {

            @Override
            public void process(IStructuredSelection selection) {
                for (Iterator<?> iter = selection.iterator(); iter.hasNext();) {
                    EObject elem = (EObject) iter.next();
                    propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(APIResourceInputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, elem));
                }
                incomingLinks.refresh();
            }

        };
        dialog.open();
    }

    /**
     * Fires a COMMIT/MOVE event placing {@code element} at {@code newIndex};
     * note {@code oldIndex} is not propagated — the EEF runtime resolves the
     * current position itself.
     */
    protected void moveIncomingLinks(EObject element, int oldIndex, int newIndex) {
        propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(APIResourceInputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
        incomingLinks.refresh();
    }

    /**
     * Fires a COMMIT/REMOVE event for {@code element} and refreshes the table.
     */
    protected void removeFromIncomingLinks(EObject element) {
        propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(APIResourceInputConnectorPropertiesEditionPartImpl.this, EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
        incomingLinks.refresh();
    }

    /**
     * Opens an in-place EEF editing session for {@code element}: adapts it to
     * a {@link PropertiesEditingProvider}, executes the resulting policy, and
     * refreshes the table only if a policy was actually obtained and run.
     */
    protected void editIncomingLinks(EObject element) {
        EObjectPropertiesEditionContext context = new EObjectPropertiesEditionContext(propertiesEditionComponent.getEditingContext(), propertiesEditionComponent, element, adapterFactory);
        PropertiesEditingProvider provider = (PropertiesEditingProvider)adapterFactory.adapt(element, PropertiesEditingProvider.class);
        if (provider != null) {
            PropertiesEditingPolicy policy = provider.getPolicy(context);
            if (policy != null) {
                policy.execute();
                incomingLinks.refresh();
            }
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
     *
     */
    public void firePropertiesChanged(IPropertiesEditionEvent event) {
        // Start of user code for tab synchronization
        // End of user code
    }

    /**
     * Initializes the table from the model: installs a content provider, binds
     * the settings as input, clears both filter lists, and toggles the widget's
     * enabled state to match the element's read-only status (only touching the
     * widget when the state actually differs, to avoid redundant SWT updates).
     *
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart#initIncomingLinks(org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings)
     */
    public void initIncomingLinks(ReferencesTableSettings settings) {
        if (current.eResource() != null && current.eResource().getResourceSet() != null)
            this.resourceSet = current.eResource().getResourceSet();
        ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
        incomingLinks.setContentProvider(contentProvider);
        incomingLinks.setInput(settings);
        incomingLinksBusinessFilters.clear();
        incomingLinksFilters.clear();
        boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.APIResourceInputConnector.Properties.incomingLinks);
        if (eefElementEditorReadOnlyState && incomingLinks.getTable().isEnabled()) {
            incomingLinks.setEnabled(false);
            incomingLinks.setToolTipText(EsbMessages.APIResourceInputConnector_ReadOnly);
        } else if (!eefElementEditorReadOnlyState && !incomingLinks.getTable().isEnabled()) {
            incomingLinks.setEnabled(true);
        }
    }

    /**
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart#updateIncomingLinks()
     *
     */
    public void updateIncomingLinks() {
        incomingLinks.refresh();
    }

    /**
     * Registers a viewer filter for the add dialog (controls visibility).
     *
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart#addFilterIncomingLinks(ViewerFilter filter)
     *
     */
    public void addFilterToIncomingLinks(ViewerFilter filter) {
        incomingLinksFilters.add(filter);
    }

    /**
     * Registers a business filter for the add dialog (controls selectability).
     *
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart#addBusinessFilterIncomingLinks(ViewerFilter filter)
     *
     */
    public void addBusinessFilterToIncomingLinks(ViewerFilter filter) {
        incomingLinksBusinessFilters.add(filter);
    }

    /**
     * Checks membership against the table's current input settings (the model
     * state), not the rendered widget rows.
     *
     * {@inheritDoc}
     *
     * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.APIResourceInputConnectorPropertiesEditionPart#isContainedInIncomingLinksTable(EObject element)
     *
     */
    public boolean isContainedInIncomingLinksTable(EObject element) {
        return ((ReferencesTableSettings)incomingLinks.getInput()).contains(element);
    }

    /**
     * {@inheritDoc}
     *
     * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
     *
     */
    public String getTitle() {
        return EsbMessages.APIResourceInputConnector_Part_Title;
    }

    // Start of user code additional methods
    // End of user code

}
/**
 */
package CIM.IEC61970.Outage.util;

import CIM.Element;

import CIM.IEC61970.Core.BasicIntervalSchedule;
import CIM.IEC61970.Core.IdentifiedObject;
import CIM.IEC61970.Core.IrregularIntervalSchedule;

import CIM.IEC61970.Outage.*;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;

import org.eclipse.emf.ecore.util.Switch;

/**
 * <!-- begin-user-doc -->
 * The <b>Switch</b> for the model's inheritance hierarchy.
 * It supports the call {@link #doSwitch(EObject) doSwitch(object)}
 * to invoke the <code>caseXXX</code> method for each class of the model,
 * starting with the actual class of the object
 * and proceeding up the inheritance hierarchy
 * until a non-null result is returned,
 * which is the result of the switch.
 * <!-- end-user-doc -->
 * @see CIM.IEC61970.Outage.OutagePackage
 * @generated
 */
public class OutageSwitch<T> extends Switch<T> {
    /**
     * The cached model package
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected static OutagePackage modelPackage;

    /**
     * Creates an instance of the switch.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public OutageSwitch() {
        if (modelPackage == null) {
            modelPackage = OutagePackage.eINSTANCE;
        }
    }

    /**
     * Checks whether this is a switch for the given package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param ePackage the package in question.
     * @return whether this is a switch for the given package.
     * @generated
     */
    @Override
    protected boolean isSwitchFor(EPackage ePackage) {
        return ePackage == modelPackage;
    }

    /**
     * Calls <code>caseXXX</code> for each class of the model until one returns a non null result; it yields that result.
     * <!-- begin-user-doc -->
     * For each classifier, the <code>case</code> methods are tried from the
     * most specific type up its supertype chain (e.g. OutageSchedule ->
     * IrregularIntervalSchedule -> BasicIntervalSchedule -> IdentifiedObject
     * -> Element), falling back to {@link #defaultCase(EObject)}.
     * <!-- end-user-doc -->
     * @return the first non-null result returned by a <code>caseXXX</code> call.
     * @generated
     */
    @Override
    protected T doSwitch(int classifierID, EObject theEObject) {
        switch (classifierID) {
            case OutagePackage.OUTAGE_SCHEDULE: {
                OutageSchedule outageSchedule = (OutageSchedule)theEObject;
                T result = caseOutageSchedule(outageSchedule);
                if (result == null) result = caseIrregularIntervalSchedule(outageSchedule);
                if (result == null) result = caseBasicIntervalSchedule(outageSchedule);
                if (result == null) result = caseIdentifiedObject(outageSchedule);
                if (result == null) result = caseElement(outageSchedule);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case OutagePackage.CLEARANCE_TAG_TYPE: {
                ClearanceTagType clearanceTagType = (ClearanceTagType)theEObject;
                T result = caseClearanceTagType(clearanceTagType);
                if (result == null) result = caseIdentifiedObject(clearanceTagType);
                if (result == null) result = caseElement(clearanceTagType);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case OutagePackage.CLEARANCE_TAG: {
                ClearanceTag clearanceTag = (ClearanceTag)theEObject;
                T result = caseClearanceTag(clearanceTag);
                if (result == null) result = caseIdentifiedObject(clearanceTag);
                if (result == null) result = caseElement(clearanceTag);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            case OutagePackage.SWITCHING_OPERATION: {
                SwitchingOperation switchingOperation = (SwitchingOperation)theEObject;
                T result = caseSwitchingOperation(switchingOperation);
                if (result == null) result = caseIdentifiedObject(switchingOperation);
                if (result == null) result = caseElement(switchingOperation);
                if (result == null) result = defaultCase(theEObject);
                return result;
            }
            default: return defaultCase(theEObject);
        }
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Schedule</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Schedule</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseOutageSchedule(OutageSchedule object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Clearance Tag Type</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Clearance Tag Type</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseClearanceTagType(ClearanceTagType object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Clearance Tag</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Clearance Tag</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseClearanceTag(ClearanceTag object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Switching Operation</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Switching Operation</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseSwitchingOperation(SwitchingOperation object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Element</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Element</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseElement(Element object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Identified Object</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Identified Object</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseIdentifiedObject(IdentifiedObject object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Basic Interval Schedule</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Basic Interval Schedule</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseBasicIntervalSchedule(BasicIntervalSchedule object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>Irregular Interval Schedule</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>Irregular Interval Schedule</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject)
     * @generated
     */
    public T caseIrregularIntervalSchedule(IrregularIntervalSchedule object) {
        return null;
    }

    /**
     * Returns the result of interpreting the object as an instance of '<em>EObject</em>'.
     * <!-- begin-user-doc -->
     * This implementation returns null;
     * returning a non-null result will terminate the switch, but this is the last case anyway.
     * <!-- end-user-doc -->
     * @param object the target of the switch.
     * @return the result of interpreting the object as an instance of '<em>EObject</em>'.
     * @see #doSwitch(org.eclipse.emf.ecore.EObject)
     * @generated
     */
    @Override
    public T defaultCase(EObject object) {
        return null;
    }

} //OutageSwitch
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.mapreduce.WALInputFormat.WALKeyRecordReader;
import org.apache.hadoop.hbase.mapreduce.WALInputFormat.WALRecordReader;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.testclassification.MapReduceTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

/**
 * JUnit tests for the WALRecordReader
 */
@Category({MapReduceTests.class, MediumTests.class})
public class TestWALRecordReader {
  private static final Log LOG = LogFactory.getLog(TestWALRecordReader.class);
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Configuration conf;
  private static FileSystem fs;
  private static Path hbaseDir;
  // visible for TestHLogRecordReader
  static final TableName tableName = TableName.valueOf(getName());
  private static final byte [] rowName = tableName.getName();
  // visible for TestHLogRecordReader
  static final HRegionInfo info = new HRegionInfo(tableName,
      Bytes.toBytes(""), Bytes.toBytes(""), false);
  private static final byte [] family = Bytes.toBytes("column");
  private static final byte [] value = Bytes.toBytes("value");
  private static HTableDescriptor htd;
  private static Path logDir;

  private static String getName() {
    return "TestWALRecordReader";
  }

  @Before
  public void setUp() throws Exception {
    // Wipe everything under the HBase root dir so each test starts with a
    // clean WAL directory.
    FileStatus[] entries = fs.listStatus(hbaseDir);
    for (FileStatus dir : entries) {
      fs.delete(dir.getPath(), true);
    }
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // Make block sizes small.
    conf = TEST_UTIL.getConfiguration();
    conf.setInt("dfs.blocksize", 1024 * 1024);
    conf.setInt("dfs.replication", 1);
    TEST_UTIL.startMiniDFSCluster(1);

    conf = TEST_UTIL.getConfiguration();
    fs = TEST_UTIL.getDFSCluster().getFileSystem();

    hbaseDir = TEST_UTIL.createRootDir();
    logDir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);

    htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor(family));
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Test partial reads from the log based on passed time range.
   *
   * Writes two edits into a first WAL file, rolls, then writes two more into
   * a second file; asserts that START_TIME_KEY/END_TIME_KEY narrow both the
   * set of splits and the entries each reader yields.
   * @throws Exception
   */
  @Test
  public void testPartialRead() throws Exception {
    final WALFactory walfactory = new WALFactory(conf, null, getName());
    WAL log = walfactory.getWAL(info.getEncodedNameAsBytes());
    // This test depends on timestamp being millisecond based and the filename of the WAL also
    // being millisecond based.
    long ts = System.currentTimeMillis();
    WALEdit edit = new WALEdit();
    final AtomicLong sequenceId = new AtomicLong(0);
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"), ts, value));
    log.append(htd, info, getWalKey(ts), edit, sequenceId, true, null);
    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"), ts+1, value));
    log.append(htd, info, getWalKey(ts+1), edit, sequenceId, true, null);
    log.sync();
    LOG.info("Before 1st WAL roll " + log.toString());
    log.rollWriter();
    LOG.info("Past 1st WAL roll " + log.toString());

    // Sleep so the second file's (millisecond-based) name is strictly later.
    Thread.sleep(1);
    long ts1 = System.currentTimeMillis();

    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("3"), ts1+1, value));
    log.append(htd, info, getWalKey(ts1+1), edit, sequenceId, true, null);
    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("4"), ts1+2, value));
    log.append(htd, info, getWalKey(ts1+2), edit, sequenceId, true, null);
    log.sync();
    log.shutdown();
    walfactory.shutdown();
    LOG.info("Closed WAL " + log.toString());

    WALInputFormat input = new WALInputFormat();
    Configuration jobConf = new Configuration(conf);
    jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString());
    jobConf.setLong(WALInputFormat.END_TIME_KEY, ts);

    // only 1st file is considered, and only its 1st entry is used
    List<InputSplit> splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));

    assertEquals(1, splits.size());
    testSplit(splits.get(0), Bytes.toBytes("1"));

    jobConf.setLong(WALInputFormat.START_TIME_KEY, ts+1);
    jobConf.setLong(WALInputFormat.END_TIME_KEY, ts1+1);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    // both files need to be considered
    assertEquals(2, splits.size());
    // only the 2nd entry from the 1st file is used
    testSplit(splits.get(0), Bytes.toBytes("2"));
    // only the 1st entry from the 2nd file is used
    testSplit(splits.get(1), Bytes.toBytes("3"));
  }

  /**
   * Test basic functionality.
   *
   * Writes one edit per WAL file (rolling in between) and verifies that the
   * input format finds both files, that each split yields exactly one KV, and
   * that start/end time bounds exclude files or edits as expected.
   * @throws Exception
   */
  @Test
  public void testWALRecordReader() throws Exception {
    final WALFactory walfactory = new WALFactory(conf, null, getName());
    WAL log = walfactory.getWAL(info.getEncodedNameAsBytes());
    byte [] value = Bytes.toBytes("value");
    final AtomicLong sequenceId = new AtomicLong(0);
    WALEdit edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("1"),
        System.currentTimeMillis(), value));
    long txid = log.append(htd, info, getWalKey(System.currentTimeMillis()), edit, sequenceId, true, null);
    log.sync(txid);

    Thread.sleep(1); // make sure 2nd log gets a later timestamp
    long secondTs = System.currentTimeMillis();
    log.rollWriter();

    edit = new WALEdit();
    edit.add(new KeyValue(rowName, family, Bytes.toBytes("2"),
        System.currentTimeMillis(), value));
    txid = log.append(htd, info, getWalKey(System.currentTimeMillis()), edit, sequenceId, true, null);
    log.sync(txid);
    log.shutdown();
    walfactory.shutdown();
    long thirdTs = System.currentTimeMillis();

    // should have 2 log files now
    WALInputFormat input = new WALInputFormat();
    Configuration jobConf = new Configuration(conf);
    jobConf.set("mapreduce.input.fileinputformat.inputdir", logDir.toString());

    // make sure both logs are found
    List<InputSplit> splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    assertEquals(2, splits.size());

    // should return exactly one KV
    testSplit(splits.get(0), Bytes.toBytes("1"));
    // same for the 2nd split
    testSplit(splits.get(1), Bytes.toBytes("2"));

    // now test basic time ranges:

    // set an endtime, the 2nd log file can be ignored completely.
    jobConf.setLong(WALInputFormat.END_TIME_KEY, secondTs-1);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    assertEquals(1, splits.size());
    testSplit(splits.get(0), Bytes.toBytes("1"));

    // now set a start time
    jobConf.setLong(WALInputFormat.END_TIME_KEY, Long.MAX_VALUE);
    jobConf.setLong(WALInputFormat.START_TIME_KEY, thirdTs);
    splits = input.getSplits(MapreduceTestingShim.createJobContext(jobConf));
    // both logs need to be considered
    assertEquals(2, splits.size());
    // but both readers skip all edits
    testSplit(splits.get(0));
    testSplit(splits.get(1));
  }

  // Builds a WALKey for this test's region/table; subclasses (e.g. the
  // HLog-based variant) may override.
  protected WALKey getWalKey(final long sequenceid) {
    return new WALKey(info.getEncodedNameAsBytes(), tableName, sequenceid);
  }

  // Reader under test; overridable so subclasses can exercise other readers.
  protected WALRecordReader getReader() {
    return new WALKeyRecordReader();
  }

  /**
   * Create a new reader from the split, and match the edits against the passed columns.
   * With no columns passed, asserts the reader yields nothing at all.
   */
  private void testSplit(InputSplit split, byte[]... columns) throws Exception {
    final WALRecordReader reader = getReader();
    reader.initialize(split, MapReduceTestUtil.createDummyMapTaskAttemptContext(conf));

    for (byte[] column : columns) {
      assertTrue(reader.nextKeyValue());
      Cell cell = reader.getCurrentValue().getCells().get(0);
      if (!Bytes.equals(column, 0, column.length, cell.getQualifierArray(),
        cell.getQualifierOffset(), cell.getQualifierLength())) {
        assertTrue("expected [" + Bytes.toString(column) + "], actual ["
            + Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(),
              cell.getQualifierLength()) + "]", false);
      }
    }
    // Reader must be exhausted once all expected columns were consumed.
    assertFalse(reader.nextKeyValue());
    reader.close();
  }
}
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package git4idea.history;

import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.FilePathImpl;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.annotate.ShowAllAffectedGenericAction;
import com.intellij.openapi.vcs.changes.ContentRevision;
import com.intellij.openapi.vcs.history.*;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Consumer;
import com.intellij.util.Processor;
import com.intellij.util.ui.ColumnInfo;
import consulo.logging.Logger;
import git4idea.GitFileRevision;
import git4idea.GitRevisionNumber;
import git4idea.GitUtil;
import git4idea.GitVcs;
import git4idea.changes.GitChangeUtils;
import git4idea.config.GitExecutableValidator;
import git4idea.history.browser.SHAHash;
import git4idea.repo.GitRepository;
import git4idea.repo.GitRepositoryManager;
import jakarta.inject.Singleton;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.swing.*;
import java.util.Collections;
import java.util.List;

/**
 * Git history provider implementation
 */
@Singleton
public class GitHistoryProvider implements VcsHistoryProvider,
                                           VcsCacheableHistorySessionFactory<Boolean, VcsAbstractHistorySession>,
                                           VcsBaseRevisionAdviser {
  private static final Logger log = Logger.getInstance(GitHistoryProvider.class.getName());

  @Nonnull
  private final Project myProject;

  public GitHistoryProvider(@Nonnull Project project) {
    myProject = project;
  }

  // No git-specific columns are contributed to the history table.
  public VcsDependentHistoryComponents getUICustomization(final VcsHistorySession session,
                                                          JComponent forShortcutRegistration) {
    return VcsDependentHistoryComponents.createOnlyColumns(new ColumnInfo[0]);
  }

  // Extra toolbar actions for the file-history view.
  public AnAction[] getAdditionalActions(Runnable refresher) {
    return new AnAction[]{ShowAllAffectedGenericAction.getInstance(), new CopyRevisionNumberAction()};
  }

  public boolean isDateOmittable() {
    return false;
  }

  @Nullable
  public String getHelpId() {
    return null;
  }

  // Cache-key customization is not used: the session is keyed by the file path itself.
  @Override
  public FilePath getUsedFilePath(VcsAbstractHistorySession session) {
    return null;
  }

  // No additional data is stored alongside the cached history session.
  // (Method name typo comes from the VcsCacheableHistorySessionFactory
  // interface and cannot be fixed here.)
  @Override
  public Boolean getAddinionallyCachedData(VcsAbstractHistorySession session) {
    return null;
  }

  // Rebuilds a live session from the cached revision list.
  @Override
  public VcsAbstractHistorySession createFromCachedData(Boolean aBoolean,
                                                        @Nonnull List<VcsFileRevision> revisions,
                                                        @Nonnull FilePath filePath,
                                                        VcsRevisionNumber currentRevision) {
    return createSession(filePath, revisions, currentRevision);
  }

  /**
   * Loads the full git history of the file synchronously. If the git
   * executable fails, the error is surfaced via the executable validator and
   * a session with a null revision list is returned.
   */
  @Nullable
  public VcsHistorySession createSessionFor(final FilePath filePath) throws VcsException {
    List<VcsFileRevision> revisions = null;
    try {
      revisions = GitHistoryUtils.history(myProject, filePath);
    }
    catch (VcsException e) {
      GitVcs.getInstance(myProject).getExecutableValidator().showNotificationOrThrow(e);
    }
    return createSession(filePath, revisions, null);
  }

  // Wraps a revision list in a session whose "current revision" is resolved
  // lazily from HEAD.
  private VcsAbstractHistorySession createSession(final FilePath filePath, final List<VcsFileRevision> revisions,
                                                  @Nullable final VcsRevisionNumber number) {
    return new VcsAbstractHistorySession(revisions, number) {
      @Nullable
      protected VcsRevisionNumber calcCurrentRevisionNumber() {
        try {
          return GitHistoryUtils.getCurrentRevision(myProject, filePath, "HEAD");
        }
        catch (VcsException e) {
          // likely the file is not under VCS anymore.
          if (log.isDebugEnabled()) {
            log.debug("Unable to retrieve the current revision number", e);
          }
          return null;
        }
      }

      public HistoryAsTreeProvider getHistoryAsTreeProvider() {
        return null;
      }

      @Override
      public VcsHistorySession copy() {
        return createSession(filePath, getRevisionList(), getCurrentRevisionNumber());
      }
    };
  }

  /**
   * Feeds the content of {@code filePath} at {@code beforeVersionId} to the
   * processor (used to find a base revision when applying a patch).
   *
   * @return true when the processor consumed the content and requested to stop
   *         iterating (note the inverted {@code !} on the processor's result);
   *         false when the precondition checks fail.
   * @throws VcsException if the revision does not exist or its content cannot be loaded.
   */
  @Override
  public boolean getBaseVersionContent(FilePath filePath,
                                       Processor<CharSequence> processor,
                                       final String beforeVersionId,
                                       List<String> warnings) throws VcsException {
    if (StringUtil.isEmptyOrSpaces(beforeVersionId) || filePath.getVirtualFile() == null) return false;
    // apply if base revision id matches revision
    final VirtualFile root = GitUtil.getGitRoot(filePath);
    if (root == null) return false;

    final SHAHash shaHash = GitChangeUtils.commitExists(myProject, root, beforeVersionId, null, "HEAD");
    if (shaHash == null) {
      throw new VcsException("Can not apply patch to " + filePath.getPath() + ".\nCan not find revision '" + beforeVersionId + "'.");
    }

    final ContentRevision content = GitVcs.getInstance(myProject).getDiffProvider()
      .createFileContent(new GitRevisionNumber(shaHash.getValue()), filePath.getVirtualFile());
    if (content == null) {
      throw new VcsException("Can not load content of '" + filePath.getPath() + "' for revision '" + shaHash.getValue() + "'");
    }
    return ! processor.process(content.getContent());
  }

  /**
   * Streams the file's history to {@code partner} revision-by-revision: an
   * empty session is reported first, then each {@link GitFileRevision} as git
   * produces it. History length is capped by the user's LIMIT_HISTORY setting.
   * Git errors are forwarded to the partner only if the executable itself is
   * valid (otherwise the validator already notified the user).
   */
  public void reportAppendableHistory(final FilePath path, final VcsAppendableHistorySessionPartner partner) throws VcsException {
    final VcsAbstractHistorySession emptySession = createSession(path, Collections.<VcsFileRevision>emptyList(), null);
    partner.reportCreatedEmptySession(emptySession);

    VcsConfiguration vcsConfiguration = VcsConfiguration.getInstance(myProject);
    String[] additionalArgs = vcsConfiguration.LIMIT_HISTORY ?
                              new String[] { "--max-count=" + vcsConfiguration.MAXIMUM_HISTORY_ROWS } :
                              ArrayUtil.EMPTY_STRING_ARRAY;

    final GitExecutableValidator validator = GitVcs.getInstance(myProject).getExecutableValidator();
    GitHistoryUtils.history(myProject, refreshPath(path), null, new Consumer<GitFileRevision>() {
      public void consume(GitFileRevision gitFileRevision) {
        partner.acceptRevision(gitFileRevision);
      }
    }, new Consumer<VcsException>() {
      public void consume(VcsException e) {
        if (validator.checkExecutableAndNotifyIfNeeded()) {
          partner.reportException(e);
        }
      }
    }, additionalArgs);
  }

  /**
   * Refreshes the IO File inside this FilePath to let it survive moves.
   */
  @Nonnull
  private static FilePath refreshPath(@Nonnull FilePath path) {
    VirtualFile virtualFile = path.getVirtualFile();
    if (virtualFile == null) {
      return path;
    }
    return new FilePathImpl(virtualFile);
  }

  public boolean supportsHistoryForDirectories() {
    return true;
  }

  @Override
  public DiffFromHistoryHandler getHistoryDiffHandler() {
    return new GitDiffFromHistoryHandler(myProject);
  }

  // History is only available for files inside an initialized (non-fresh,
  // i.e. has at least one commit) git repository.
  @Override
  public boolean canShowHistoryFor(@Nonnull VirtualFile file) {
    GitRepositoryManager manager = GitUtil.getRepositoryManager(myProject);
    GitRepository repository = manager.getRepositoryForFile(file);
    return repository != null && !repository.isFresh();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.record.sink.kafka; import org.apache.kafka.clients.producer.Callback; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.TopicPartition; import org.apache.nifi.attribute.expression.language.StandardPropertyValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.PropertyValue; import org.apache.nifi.components.state.StateManager; import org.apache.nifi.controller.ConfigurationContext; import org.apache.nifi.controller.ControllerServiceInitializationContext; import org.apache.nifi.kerberos.KerberosCredentialsService; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processors.kafka.pubsub.KafkaProcessorUtils; import org.apache.nifi.record.sink.RecordSinkService; import org.apache.nifi.reporting.InitializationException; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.serialization.SimpleRecordSchema; import org.apache.nifi.serialization.record.ListRecordSet; import 
org.apache.nifi.serialization.record.MapRecord; import org.apache.nifi.serialization.record.MockRecordWriter; import org.apache.nifi.serialization.record.RecordField; import org.apache.nifi.serialization.record.RecordFieldType; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.serialization.record.RecordSet; import org.apache.nifi.ssl.SSLContextService; import org.apache.nifi.state.MockStateManager; import org.apache.nifi.util.MockControllerServiceInitializationContext; import org.junit.jupiter.api.Test; import org.mockito.ArgumentMatcher; import org.mockito.Mockito; import org.mockito.stubbing.Answer; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Future; import java.util.concurrent.FutureTask; import java.util.concurrent.TimeUnit; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class TestKafkaRecordSink_1_0 { private static final String TOPIC_NAME = "unit-test"; @Test public void testRecordFormat() throws IOException, InitializationException { MockKafkaRecordSink_1_0 task = initTask(); List<RecordField> recordFields = Arrays.asList( new RecordField("field1", RecordFieldType.INT.getDataType()), new RecordField("field2", RecordFieldType.STRING.getDataType()) ); RecordSchema recordSchema = new SimpleRecordSchema(recordFields); Map<String, Object> row1 = new HashMap<>(); row1.put("field1", 15); row1.put("field2", "Hello"); Map<String, Object> row2 = new HashMap<>(); row2.put("field1", 6); row2.put("field2", "World!"); RecordSet recordSet = new ListRecordSet(recordSchema, Arrays.asList( new MapRecord(recordSchema, row1), new MapRecord(recordSchema, row2) )); 
task.sendData(recordSet, new HashMap<>(), true); assertEquals(1, task.dataSent.size()); String[] lines = new String(task.dataSent.get(0)).split("\n"); assertNotNull(lines); assertEquals(2, lines.length); String[] data = lines[0].split(","); assertEquals("15", data[0]); // In the MockRecordWriter all values are strings assertEquals("Hello", data[1]); data = lines[1].split(","); assertEquals("6", data[0]); assertEquals("World!", data[1]); } private MockKafkaRecordSink_1_0 initTask() throws InitializationException { final ComponentLog logger = mock(ComponentLog.class); final MockKafkaRecordSink_1_0 task = new MockKafkaRecordSink_1_0(); ConfigurationContext context = mock(ConfigurationContext.class); final StateManager stateManager = new MockStateManager(task); final PropertyValue topicValue = Mockito.mock(StandardPropertyValue.class); when(topicValue.evaluateAttributeExpressions()).thenReturn(topicValue); when(topicValue.getValue()).thenReturn(TOPIC_NAME); when(context.getProperty(KafkaRecordSink_1_0.TOPIC)).thenReturn(topicValue); final PropertyValue deliveryValue = Mockito.mock(StandardPropertyValue.class); when(deliveryValue.getValue()).thenReturn(KafkaRecordSink_1_0.DELIVERY_REPLICATED.getValue()); when(context.getProperty(KafkaRecordSink_1_0.DELIVERY_GUARANTEE)).thenReturn(deliveryValue); final PropertyValue maxSizeValue = Mockito.mock(StandardPropertyValue.class); when(maxSizeValue.asDataSize(DataUnit.B)).thenReturn(1024.0); when(context.getProperty(KafkaRecordSink_1_0.MAX_REQUEST_SIZE)).thenReturn(maxSizeValue); final PropertyValue maxAckWaitValue = Mockito.mock(StandardPropertyValue.class); when(maxAckWaitValue.asTimePeriod(TimeUnit.MILLISECONDS)).thenReturn(5000L); when(context.getProperty(KafkaRecordSink_1_0.ACK_WAIT_TIME)).thenReturn(maxAckWaitValue); final PropertyValue charEncodingValue = Mockito.mock(StandardPropertyValue.class); when(charEncodingValue.evaluateAttributeExpressions()).thenReturn(charEncodingValue); 
when(charEncodingValue.getValue()).thenReturn("UTF-8"); when(context.getProperty(KafkaRecordSink_1_0.MESSAGE_HEADER_ENCODING)).thenReturn(charEncodingValue); final PropertyValue securityValue = Mockito.mock(StandardPropertyValue.class); when(securityValue.getValue()).thenReturn(KafkaProcessorUtils.SEC_SASL_PLAINTEXT.getValue()); when(context.getProperty(KafkaProcessorUtils.SECURITY_PROTOCOL)).thenReturn(securityValue); final PropertyValue jaasValue = Mockito.mock(StandardPropertyValue.class); when(jaasValue.evaluateAttributeExpressions()).thenReturn(jaasValue); when(jaasValue.getValue()).thenReturn(null); when(context.getProperty(KafkaProcessorUtils.JAAS_SERVICE_NAME)).thenReturn(jaasValue); Map<PropertyDescriptor, String> propertyMap = new HashMap<>(); propertyMap.put(KafkaRecordSink_1_0.TOPIC, KafkaRecordSink_1_0.TOPIC.getName()); propertyMap.put(KafkaRecordSink_1_0.DELIVERY_GUARANTEE, KafkaRecordSink_1_0.DELIVERY_GUARANTEE.getName()); propertyMap.put(KafkaRecordSink_1_0.MAX_REQUEST_SIZE, KafkaRecordSink_1_0.MAX_REQUEST_SIZE.getName()); propertyMap.put(KafkaRecordSink_1_0.ACK_WAIT_TIME, KafkaRecordSink_1_0.ACK_WAIT_TIME.getName()); propertyMap.put(KafkaRecordSink_1_0.MESSAGE_HEADER_ENCODING, KafkaRecordSink_1_0.MESSAGE_HEADER_ENCODING.getName()); when(context.getProperties()).thenReturn(propertyMap); final PropertyValue pValue = Mockito.mock(StandardPropertyValue.class); // No header, don't quote values MockRecordWriter writer = new MockRecordWriter(null, false); when(context.getProperty(RecordSinkService.RECORD_WRITER_FACTORY)).thenReturn(pValue); when(pValue.asControllerService(RecordSetWriterFactory.class)).thenReturn(writer); when(context.getProperty(KafkaProcessorUtils.SSL_CONTEXT_SERVICE)).thenReturn(pValue); when(pValue.asControllerService(SSLContextService.class)).thenReturn(null); when(context.getProperty(KafkaProcessorUtils.KERBEROS_CREDENTIALS_SERVICE)).thenReturn(pValue); 
when(pValue.asControllerService(KerberosCredentialsService.class)).thenReturn(null); final ControllerServiceInitializationContext initContext = new MockControllerServiceInitializationContext(task, UUID.randomUUID().toString(), logger, stateManager); task.initialize(initContext); task.onEnabled(context); return task; } private static class MockKafkaRecordSink_1_0 extends KafkaRecordSink_1_0 { final List<byte[]> dataSent = new ArrayList<>(); @SuppressWarnings("unchecked") @Override protected Producer<byte[], byte[]> createProducer(Map<String, Object> kafkaProperties) { final Producer<byte[], byte[]> mockProducer = (Producer<byte[], byte[]>) mock(Producer.class); when(mockProducer.send(Mockito.argThat(new ByteProducerRecordMatcher()), any(Callback.class))).then( (Answer<Future<RecordMetadata>>) invocationOnMock -> { ProducerRecord<byte[], byte[]> producerRecord = invocationOnMock.getArgument(0); final byte[] data = producerRecord.value(); dataSent.add(data); Callback callback = invocationOnMock.getArgument(1); RecordMetadata recordMetadata = new RecordMetadata( new TopicPartition(producerRecord.topic(), producerRecord.partition() != null ? producerRecord.partition() : 0), 0, data.length, producerRecord.timestamp() != null ? producerRecord.timestamp() : System.currentTimeMillis(), new Long(0L), producerRecord.key() != null ? producerRecord.key().length : 0, data.length); callback.onCompletion(recordMetadata, null); return new FutureTask(() -> {}, recordMetadata); }); return mockProducer; } } private static class ByteProducerRecordMatcher implements ArgumentMatcher<ProducerRecord<byte[], byte[]>> { @Override public boolean matches(ProducerRecord<byte[], byte[]> producer) { return true; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.io.sstable;

import java.io.Closeable;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.columniterator.IColumnIterator;
import org.apache.cassandra.db.filter.QueryFilter;
import org.apache.cassandra.io.util.BufferedRandomAccessFile;
import org.apache.cassandra.utils.ByteBufferUtil;

/**
 * Sequential scanner over the rows of a single SSTable data file.
 *
 * <p>Iteration state is shared between this class and the inner
 * {@link KeyScanningIterator}: {@code row} is the last row handed out and
 * {@code finishedAt} marks the file offset where that row's data ends, so the
 * next call can seek past it.  NOTE(review): instances are not thread-safe —
 * all state is unsynchronized and tied to a single file handle.
 */
public class SSTableScanner implements Iterator<IColumnIterator>, Closeable
{
    private static Logger logger = LoggerFactory.getLogger(SSTableScanner.class);

    // Random-access handle positioned at the next row to decode.
    protected final BufferedRandomAccessFile file;
    public final SSTableReader sstable;
    // Last row returned; null until next() has been called (or after seekTo()).
    private IColumnIterator row;
    // Set by seekTo() when the seek key is past the end of the sstable.
    protected boolean exhausted = false;
    // Lazily created by hasNext()/next(); empty iterator when exhausted.
    protected Iterator<IColumnIterator> iterator;
    // Optional column filter; when null, whole-row identity iterators are produced.
    private QueryFilter filter;

    /**
     * @param sstable SSTable to scan.
     */
    SSTableScanner(SSTableReader sstable, int bufferSize, boolean skipCache)
    {
        try
        {
            this.file = new BufferedRandomAccessFile(new File(sstable.getFilename()), "r", bufferSize, skipCache);
        }
        catch (IOException e)
        {
            // I/O failure opening the data file is unrecoverable for a scanner.
            throw new IOError(e);
        }
        this.sstable = sstable;
    }

    /**
     * @param sstable SSTable to scan.
     * @param filter filter to use when scanning the columns
     */
    SSTableScanner(SSTableReader sstable, QueryFilter filter, int bufferSize)
    {
        try
        {
            this.file = new BufferedRandomAccessFile(sstable.getFilename(), "r", bufferSize);
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
        this.sstable = sstable;
        this.filter = filter;
    }

    public void close() throws IOException
    {
        file.close();
    }

    /**
     * Positions the scanner at the first row whose key is &gt;= seekKey; if no such
     * row exists the scanner becomes exhausted.  Resets {@code row} so the next
     * {@code next()} decodes from the seeked position.
     */
    public void seekTo(DecoratedKey seekKey)
    {
        try
        {
            long position = sstable.getPosition(seekKey, SSTableReader.Operator.GE);
            if (position < 0)
            {
                exhausted = true;
                return;
            }
            file.seek(position);
            row = null;
        }
        catch (IOException e)
        {
            throw new RuntimeException("corrupt sstable", e);
        }
    }

    public long getFileLength()
    {
        try
        {
            return file.length();
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
    }

    public long getFilePointer()
    {
        return file.getFilePointer();
    }

    public boolean hasNext()
    {
        if (iterator == null)
            iterator = exhausted ? Arrays.asList(new IColumnIterator[0]).iterator() : new KeyScanningIterator();
        return iterator.hasNext();
    }

    public IColumnIterator next()
    {
        if (iterator == null)
            iterator = exhausted ? Arrays.asList(new IColumnIterator[0]).iterator() : new KeyScanningIterator();
        return iterator.next();
    }

    public void remove()
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Walks the data file row by row.  Relies on the outer {@code row} /
     * {@code finishedAt} pair: before decoding a new row it seeks past the
     * previous row's data (the previous row may not have been fully consumed
     * by the caller).
     */
    protected class KeyScanningIterator implements Iterator<IColumnIterator>
    {
        // File offset one past the end of the most recently returned row's data.
        protected long finishedAt;

        public boolean hasNext()
        {
            try
            {
                // Before the first next(): rows remain unless already at EOF.
                if (row == null)
                    return !file.isEOF();
                // Otherwise rows remain while the last row ended before EOF.
                return finishedAt < file.length();
            }
            catch (IOException e)
            {
                throw new RuntimeException(e);
            }
        }

        public IColumnIterator next()
        {
            try
            {
                // Skip any unconsumed remainder of the previous row.
                if (row != null)
                    file.seek(finishedAt);
                assert !file.isEOF();

                // Row layout: short-length-prefixed key, then row size, then row data.
                DecoratedKey key = SSTableReader.decodeKey(sstable.partitioner,
                                                           sstable.descriptor,
                                                           ByteBufferUtil.readWithShortLength(file));
                long dataSize = SSTableReader.readRowSize(file, sstable.descriptor);
                long dataStart = file.getFilePointer();
                finishedAt = dataStart + dataSize;

                if (filter == null)
                {
                    row = new SSTableIdentityIterator(sstable, file, key, dataStart, dataSize);
                    return row;
                }
                else
                {
                    return row = filter.getSSTableColumnIterator(sstable, file, key);
                }
            }
            catch (IOException e)
            {
                throw new RuntimeException(SSTableScanner.this + " failed to provide next columns from " + this, e);
            }
        }

        public void remove()
        {
            throw new UnsupportedOperationException();
        }

        @Override
        public String toString()
        {
            return getClass().getSimpleName() + "(" + "finishedAt:" + finishedAt + ")";
        }
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + "(" +
               "file=" + file +
               " sstable=" + sstable +
               " exhausted=" + exhausted +
               ")";
    }
}
package com.bemoreio.podchief.mail; import android.util.Log; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.security.Security; import java.util.Properties; import javax.activation.CommandMap; import javax.activation.DataHandler; import javax.activation.DataSource; import javax.activation.FileDataSource; import javax.activation.MailcapCommandMap; import javax.mail.Address; import javax.mail.BodyPart; import javax.mail.Message; import javax.mail.MessagingException; import javax.mail.Multipart; import javax.mail.PasswordAuthentication; import javax.mail.Session; import javax.mail.Transport; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMessage; import javax.mail.internet.MimeMultipart; public class EmailSender extends javax.mail.Authenticator { private static final String TAG = "com.witopia.securemyemail.mail.PgpMailSender"; // private EmailSettings emailSettings; private String mailhost;// = "smtp.gmail.com"; private String user; private String password; private Session session; static { Security.addProvider(new JSSEProvider()); } public EmailSender() { this.user = "cdillon@bemoreio.com"; this.password = "monkey5$"; this.mailhost = GmailSettings.OUTGOING_SERVER_HOST; Properties props = new Properties(); props.setProperty("mail.transport.protocol", "smtp"); props.setProperty("mail.host", mailhost); props.put("mail.smtp.auth", "true"); props.put("mail.smtp.port", "465"); props.put("mail.smtp.socketFactory.port", "465"); props.put("mail.smtp.socketFactory.class", "javax.net.ssl.SSLSocketFactory"); props.put("mail.smtp.socketFactory.fallback", "false"); props.setProperty("mail.smtp.quitwait", "false"); session = Session.getDefaultInstance(props, this); session = Session.getInstance(props, this); } private void setSecurityType(Properties props) { 
props.put("mail.smtp.timeout", "5000"); //User needs to supply username/password props.put("mail.smtp.auth", "true"); //Connection should issue "STARTTLS" command on handshake props.put("mail.smtp.starttls.enable", "true"); //Connection is assumed to be SSL props.setProperty("mail.smtp.ssl.enable", "true"); } protected PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication(user, password); } // public void testConnection() throws javax.mail.NoSuchProviderException, MessagingException // { // Transport transport = session.getTransport("smtps"); // // transport.connect(this.emailSettings.getOutgoingServerHost(), this.emailSettings.getOutgoingServerPort(), this.emailSettings.getOutgoingUsername(), password); // transport.close(); // } public Message getMessageWithAttachment(String toEmail, String filepath) throws UnsupportedEncodingException, MessagingException { MimeMessage message = new MimeMessage(session); InternetAddress from = new InternetAddress(); from.setAddress(user); String personal = "POD Chief"; from.setPersonal(personal); message.setFrom(from); message.setSender(from); message.setSubject("POD Chief"); // Create the message part BodyPart messageBodyPart = new MimeBodyPart(); // Fill the message messageBodyPart.setText("This is message body"); // Create a multipar message Multipart multipart = new MimeMultipart(); // Set text message part multipart.addBodyPart(messageBodyPart); // Part two is attachment messageBodyPart = new MimeBodyPart(); DataSource source = new FileDataSource(filepath); messageBodyPart.setDataHandler(new DataHandler(source)); messageBodyPart.setFileName(filepath); multipart.addBodyPart(messageBodyPart); MailcapCommandMap mc = (MailcapCommandMap) CommandMap.getDefaultCommandMap(); mc.addMailcap("text/html;; x-java-content-handler=com.sun.mail.handlers.text_html"); mc.addMailcap("text/xml;; x-java-content-handler=com.sun.mail.handlers.text_xml"); mc.addMailcap("text/plain;; 
x-java-content-handler=com.sun.mail.handlers.text_plain"); mc.addMailcap("multipart/*;; x-java-content-handler=com.sun.mail.handlers.multipart_mixed"); mc.addMailcap("message/rfc822;; x-java-content- handler=com.sun.mail.handlers.message_rfc822"); // Send the complete message parts message.setContent(multipart); Address[] toRecipients = new Address[] { new InternetAddress(toEmail)}; message.setRecipients(Message.RecipientType.TO, toRecipients); return message; } public Boolean sendMailWithAttachment(String toEmail, String filePath) { Boolean result = true; try { Message message = this.getMessageWithAttachment(toEmail, filePath); Transport.send(message); } catch (Exception e) { result = false; e.printStackTrace(); Log.e("PODChief", "Error Send Email: " + e.getMessage()); } return result; } public class ByteArrayDataSource implements DataSource { private byte[] data; private String type; public ByteArrayDataSource(byte[] data, String type) { super(); this.data = data; this.type = type; } public ByteArrayDataSource(byte[] data) { super(); this.data = data; } public void setType(String type) { this.type = type; } public String getContentType() { if (type == null) return "application/octet-stream"; else return type; } public InputStream getInputStream() throws IOException { return new ByteArrayInputStream(data); } public String getName() { return "ByteArrayDataSource"; } public OutputStream getOutputStream() throws IOException { throw new IOException("Not Supported"); } } }
/*
 * Copyright (C) 2010 Christian Gawron
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.cgawron.go;

import java.beans.PropertyChangeSupport;
import java.util.Collection;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * A {@link Goban} providing <code>PropertyChangeSupport</code>.
 *
 * @author Christian Gawron
 * @version $Id: AbstractGoban.java 155 2005-01-04 10:47:49Z cgawron $
 * @see PropertyChangeSupport
 */
public abstract class AbstractGoban implements Goban {
    protected static Logger logger = Logger.getLogger(AbstractGoban.class.getName());
    private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
    // Listeners notified of board mutations (stones added/removed, model changes).
    protected Collection<GobanListener> listeners = new java.util.ArrayList<GobanListener>();
    // Capture counts per color.
    protected int blackCaptured;
    protected int whiteCaptured;
    protected Point lastMove;
    protected int boardSize = 0;
    // Stones removed by the most recent move (captures).
    protected Vector<Point> removed = new Vector<Point>();

    /**
     * Computes a symmetry-transformed Zobrist-style hash of the position.
     * Each occupied point contributes +z for black and -z for white, where z is
     * the zobrist value of the transformed point; the top bits encode the stone
     * count.  NOTE(review): the {@code zobrist} table and {@code zobristHash()}
     * are referenced but not declared in this class — presumably defined
     * elsewhere in the hierarchy; confirm.
     */
    public int _hash(Symmetry s) {
        Point.BoardIterator it = new Point.BoardIterator(getBoardSize());
        int h = 0;
        int n = 0;  // number of stones on the board

        while (it.hasNext()) {
            Point p = (Point) it.next();
            BoardType stone = getStone(p);
            if (stone != BoardType.EMPTY) {
                n++;
                Point pt = s.transform(p, getBoardSize());
                int z = zobrist[pt.getY() * getBoardSize() + pt.getX()];
                if (s.transform(stone) == BoardType.BLACK)
                    h += z;
                else
                    h -= z;
            }
        }
        // if (sym & 8 != 0) h ^= 0xffffffff;
        // Pack the stone count into the high 7 bits, keeping 25 bits of hash.
        h = (h & 0x01ffffff) | ((n & 0xfe) << (32 - 7));
        return h;
    }

    /**
     * Adds {@code size} captured stones to the tally of the given color.
     *
     * @throws IllegalArgumentException if color is neither BLACK nor WHITE
     */
    protected void addCaptureStones(BoardType color, int size) {
        switch (color) {
        case WHITE:
            whiteCaptured += size;
            break;
        case BLACK:
            blackCaptured += size;
            break;
        default:
            // NOTE(review): message typo "capure" kept as-is (runtime string).
            throw new IllegalArgumentException("Can't capure " + color);
        }
    }

    /** Registers a listener for board-change events. */
    public void addGobanListener(GobanListener l) {
        listeners.add(l);
    }

    /**
     * Calculate the chinese score of the position.
     * This method assumes that all dead stones are already removed, i.e. all
     * stones on the board are considered alive, and territories containing stones of both colors are neutral.
     * @return The chinese score of the position.
     */
    public abstract int chineseScore(double[][] territory);

    @Override
    abstract public Goban clone();

    /**
     * Position equality under a board symmetry: true if {@code o}'s stones match
     * this board's stones after transforming coordinates by {@code s}.
     */
    public boolean equals(Object o, Symmetry s) {
        if (o instanceof Goban) {
            Goban goban = (Goban) o;
            Point.BoardIterator it = new Point.BoardIterator(boardSize);

            while (it.hasNext()) {
                Point p = (Point) it.next();
                Point pt = s.transform(p, boardSize);
                if (goban.getStone(p) != s.transform(getStone(pt)))
                    return false;
            }
            return true;
        }
        return false;
    }

    /**
     * NOTE(review): equality is delegated entirely to {@code hashCode()} — two
     * distinct positions with a zobrist-hash collision would compare equal.
     * This looks like a deliberate speed trade-off for position lookup; confirm
     * before relying on strict equality semantics.
     */
    public boolean equals(Object o) {
        if (o == this)
            return true;
        if (o instanceof Goban) {
            return o.hashCode() == hashCode();
        } else
            return false;
    }

    /** Notifies all listeners that the whole board model changed. */
    protected void fireModelChanged() {
        GobanEvent e = null;
        // Process the listeners last to first, notifying
        // those that are interested in this event
        for (GobanListener listener : listeners) {
            if (logger.isLoggable(Level.FINE))
                logger.fine("Notifying listener ...");
            // Lazily create the event:
            if (e == null)
                e = new GobanEvent(this);
            listener.modelChanged(e);
        }
    }

    /**
     * Notifies all listeners that a stone of color {@code c} was added at (x, y).
     */
    protected void fireStoneAdded(int x, int y, BoardType c) {
        GobanEvent e = null;
        // Process the listeners last to first, notifying
        // those that are interested in this event
        for (GobanListener listener : listeners) {
            if (logger.isLoggable(Level.FINE))
                logger.fine("Notifying listener ...");
            // Lazily create the event:
            if (e == null)
                e = new GobanEvent(this, x, y, c);
            listener.stoneAdded(e);
        }
    }

    /**
     * Notifies all listeners that the given stones were removed (captured).
     */
    protected void fireStonesRemoved(Collection<Point> removed) {
        // Guaranteed to return a non-null array
        GobanEvent e = null;
        // Process the listeners last to first, notifying
        // those that are interested in this event
        for (GobanListener listener : listeners) {
            // Lazily create the event:
            if (e == null)
                e = new GobanEvent(this, removed);
            listener.stonesRemoved(e);
        }
    }

    @Override
    public int getBlackCaptured() {
        return blackCaptured;
    }

    /** Returns the board size (e.g. 19 for a standard board). */
    public final int getBoardSize() {
        return boardSize;
    }

    public Point getLastMove() {
        return lastMove;
    }

    public Vector<Point> getRemoved() {
        return removed;
    }

    public BoardType getStone(Point p) {
        return getStone(p.getX(), p.getY());
    }

    @Override
    public int getWhiteCaptured() {
        return whiteCaptured;
    }

    public int hashCode() {
        return zobristHash();
    }

    /**
     * Checks if there are any listeners for a specific property.
     *
     * @param propertyName
     *            The name of the property
     * @return <code>true</code>if there are one or more listeners for the given
     *         property
     */
    public boolean hasListeners(String propertyName) {
        return pcs.hasListeners(propertyName);
    }

    @Override
    public boolean move(Point p, BoardType color) {
        return move(p.getX(), p.getY(), color);
    }

    // NOTE(review): moveNo is ignored here and the call is forwarded without it —
    // verify that subclasses overriding move(int,int,BoardType) do not need it.
    @Override
    public boolean move(Point p, BoardType color, int moveNo) {
        return move(p.getX(), p.getY(), color);
    }

    @Override
    public void putStone(Point p, BoardType color) {
        putStone(p.getX(), p.getY(), color);
    }

    /** Unregisters a previously added listener. */
    public void removeGobanListener(GobanListener l) {
        listeners.remove(l);
    }

    /**
     * ASCII rendering of the board: 'O' = white, 'X' = black, '.' = empty,
     * one row per line.
     */
    public String toString() {
        StringBuffer s = new StringBuffer(512);
        int i, j;
        s.append("\n");
        BoardType p;
        for (i = 0; i < getBoardSize(); i++) {
            for (j = 0; j < getBoardSize(); j++) {
                p = getStone(i, j);
                if (p == BoardType.WHITE)
                    s.append("O ");
                else if (p == BoardType.BLACK)
                    s.append("X ");
                else
                    s.append(". ");
            }
            s.append('\n');
        }
        return s.toString();
    }
}
package org.alienideology.jcord.handle.guild; import org.alienideology.jcord.handle.IDiscordObject; import org.alienideology.jcord.handle.ISnowFlake; import org.alienideology.jcord.handle.Region; import org.alienideology.jcord.handle.audit.IAuditLog; import org.alienideology.jcord.handle.channel.IGuildChannel; import org.alienideology.jcord.handle.channel.ITextChannel; import org.alienideology.jcord.handle.channel.IVoiceChannel; import org.alienideology.jcord.handle.managers.IGuildManager; import org.alienideology.jcord.handle.managers.IInviteManager; import org.alienideology.jcord.handle.user.IUser; import org.alienideology.jcord.handle.user.IWebhook; import org.alienideology.jcord.internal.rest.HttpPath; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.Arrays; import java.util.List; /** * Guild - A collection of users and channels, often referred to in the UI as a server. * @author AlienIdeology */ public interface IGuild extends IDiscordObject, ISnowFlake { /** * The minimum length of a guild's name. */ int NAME_LENGTH_MIN = 2; /** * The maximum length of a guild's name. */ int NAME_LENGTH_MAX = 100; /** * Checks if a guild's name is valid or not. * * Validations: <br /> * <ul> * <li>The name may not be null or empty.</li> * <li>The length of the name must be between {@link #NAME_LENGTH_MIN} and {@link #NAME_LENGTH_MAX}.</li> * </ul> * * @param name The name to be check with. * @return True if the name is valid. */ static boolean isValidName(String name) { return name != null && !name.isEmpty() && name.length() >= NAME_LENGTH_MIN && name.length() <= NAME_LENGTH_MAX; } /** * Leave this guild. */ default void leave() { getIdentity().getSelfManager().leaveGuild(this); } /** * @return True if the guild is available (no temporary shortage happens to discord server) */ boolean isAvailable(); /** * Get the IGuildManager of this guild. * The guild managers is used to kick, ban, unban, and change guild settings. 
* * @return The guild manager. */ IGuildManager getManager(); /** * Get the IInviteManager for this guild. * All the invite actions can be found in the manager. * * @return The invite manager. */ IInviteManager getInviteManager(); /** * Get the name of this guild. * * @return The string name. */ String getName(); /** * Get the icon hash value. * @see #getIconUrl() for getting icon url. * * @return The string icon hash. */ String getIconHash(); /** * Get the icon url of this guild. * * @return The string url, or null if the guild does not have an icon. */ default String getIconUrl() { return getIconHash() == null ? null : String.format(HttpPath.EndPoint.GUILD_ICON, getIconUrl(), getIconHash()); } /** * Get the splash icon url of this guild. A guild can get splash by partnering with Discord. * * @return The string url, or null if the guild does not have splash. */ String getSplash(); /** * Get the region of this guild's voice channels. * * @return The region enumeration representing this guild's region. */ Region getRegion(); /** * Get the AFK timeout of this guild. * @see #getAfkChannel() * * @return The integer value of the timeout. */ AFKTimeout getAfkTimeout(); /** * Get the AFK voice channel of this guild. * @see #getAfkTimeout() * * @return The channel, or null if no channel is set. */ @Nullable IVoiceChannel getAfkChannel(); /** * @see #getEmbedChannel() * @return True if an widget(embed) is enabled for this guild. */ boolean isEmbedEnabled(); /** * Get the channel of the embed widget. * @see #isEmbedEnabled() * * @return The channel, or null if no embed is set. */ @Nullable ITextChannel getEmbedChannel(); /** * Get the verification level of this guild. * * @return The verification level. */ IGuild.Verification getVerificationLevel(); /** * Get the notification level of this guild. * * @return The notification level. */ IGuild.Notification getNotificationsLevel(); /** * Get the explicit content filter level of this guild. * * @return The content filter level. 
*/ IGuild.ContentFilterLevel getContentFilterLevel(); /** * Get the MFA (Server Two-Factor Authentication) level of this guild. * * @return The MFA level. */ IGuild.MFA getMFALevel(); /** * Get the guild owner. * * @return The owner of this guild. */ IMember getOwner(); /** * Get the audit log for this guild, with an specified amount of logs to receive. * This kind of works like a {@link org.alienideology.jcord.handle.channel.MessageHistory}, * with the method {@link org.alienideology.jcord.handle.channel.MessageHistory#getLatestMessages(int)}. * * @param amount The amount of logs to receive. * @return The audit log. */ IAuditLog getAuditLog(int amount); /** * Get the audit log for this guild, with an specified amount of logs to receive before a certain audit log. * This kind of works like a {@link org.alienideology.jcord.handle.channel.MessageHistory}, * with the method {@link org.alienideology.jcord.handle.channel.MessageHistory#getMessagesBefore(String, int)}. * * @param entryId The log entry to receive logs before it. * @param amount The amount of logs to receive. * @return The audit log. */ IAuditLog getAuditLogBefore(String entryId, int amount); /** * Get a list of users belong to this guild. * * @return A list of users of this guild. */ List<IUser> getUsers(); /** * Get the member instance of the identity. * * @return The member instance */ IMember getSelfMember(); /** * Get a member by key. * * @param id The specified key * @return a Member or null if no member was found. */ @Nullable IMember getMember(String id); /** * Get a list of members belong to this guild. * * @return A list of members of this guild. */ List<IMember> getMembers(); /** * Get a webhook by key. * If the identity does not have {@code Manager Webhooks} permission, then this will always returns {@code null}. * * @param id The webhook key. * @return The webhook found, or null if no webhook has been found. 
*/ @Nullable IWebhook getWebhook(String id); /** * Get a list of webhooks belong to this guild. * It is recommended to cache the returned list, since this * * @exception org.alienideology.jcord.internal.exception.PermissionException * If the identity does not have {@code Manager Webhooks} permission. * @return A list of webhooks. */ List<IWebhook> getWebhooks(); /** * Get a role by key. * * @param id The specified key * @return a Role or null if no role was found. */ @Nullable IRole getRole(String id); /** * Get the {@code @everyone} role. * * @return The @everyone role of this guild. */ @NotNull IRole getEveryoneRole(); /** * Get all roles in this guild. * * @return A list of roles for this guild. */ List<IRole> getRoles(); /** * Get a guild emoji by key. * * @param id The specified key * @return a GuildEmoji or null if no emoji was found. */ @Nullable IGuildEmoji getGuildEmoji(String id); /** * Get all guild emojis in this guild. * * @return A list of server custom emojis this guild has. */ List<IGuildEmoji> getGuildEmojis(); /** * Get a guild channel by ID. * * @param id The channel ID. * @return A guild channel, can be text or voice channel. */ @Nullable IGuildChannel getGuildChannel(String id); /** * Get all the guild channels in this guild. * * @return A unmodifiable list of guild channels. */ List<IGuildChannel> getAllGuildChannels(); /** * Get a text channel by key. * * @param id The specified key * @return a TextChannel or null if no channel was found. */ @Nullable ITextChannel getTextChannel(String id); /** * Get the default text channel of this guild. * The default channel is the first channel with highest position * that the identity has permission to {@code Read Messages}. * * @return The text channel. */ ITextChannel getDefaultChannel(); /** * Get the default text channel for this member. * The default channel is the first channel with highest position * that the member has permission to {@code Read Messages}. * * @param member The member. 
* @return The text channel. */ ITextChannel getDefaultChannel(IMember member); /** * Get a list of text channels belong to this guild. * * @return A list of text channels this guild has. */ List<ITextChannel> getTextChannels(); /** * Get a voice channel by key. * @param id The specified key * @return a VoiceChannel or null if no channel is found. */ @Nullable IVoiceChannel getVoiceChannel(String id); /** * Get a list of voice channels belong to this guild. * * @return A list of voice channels this guild has. */ List<IVoiceChannel> getVoiceChannels(); /** * Ger a list of all integrations belong to this guild. * * @return A list of integrations. */ List<IIntegration> getIntegrations(); /* -------------------------- Guild Enumerations -------------------------- */ /** * AFK Timeouts (second) */ enum AFKTimeout { MINUTE_1 (60), MINUTES_5 (300), MINUTES_10 (600), MINUTES_30 (1800), HOUR_1 (3600), UNKNOWN (-1); public int key; AFKTimeout(int key) { this.key = key; } public static AFKTimeout getByTimeout (int timeout) { if (Arrays.stream(values()).anyMatch(afk -> afk.key == timeout)) { return Arrays.stream(values()).filter(afk -> afk.key == timeout).findFirst().get(); } else { return UNKNOWN; } } } /** * Guild Verification Level */ enum Verification { NONE (0), LOW (1), MEDIUM (2), HIGH (3), VERY_HIGH (4), UNKNOWN (-1); public final int key; Verification (int key) { this.key = key; } public static Verification getByKey(int key) { for (Verification verify : values()) { if (verify.key == key) return verify; } return UNKNOWN; } } /** * Guild Notification Level */ enum Notification { ALL_MESSAGE (0), ONLY_MENTIONS (1), UNKNOWN (-1); public final int key; Notification (int key) { this.key = key; } public static Notification getByKey(int key) { for (Notification notif : values()) { if (notif.key == key) return notif; } return UNKNOWN; } } /** * Guild Explicit Content Filter Level */ enum ContentFilterLevel { DISABLED (0), MEMBERS_WITHOUT_ROLES (1), ALL_MEMBERS (2), UNKNOWN 
(-1); public final int key; ContentFilterLevel (int key) { this.key = key; } public static ContentFilterLevel getByKey(int key) { for (ContentFilterLevel cfl : values()) { if (cfl.key == key) return cfl; } return UNKNOWN; } } /** * Guild MFA Level */ enum MFA { NONE (0), ELEVATED (1), UNKNOWN (-1); public final int key; MFA (int key) { this.key = key; } public static MFA getByKey(int key) { for (MFA mfa : values()) { if (mfa.key == key) return mfa; } return UNKNOWN; } } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.elasticmapreduce.model; import java.io.Serializable; /** * <p> * Specification of a job flow step. * </p> */ public class StepConfig implements Serializable, Cloneable { /** * <p> * The name of the job flow step. * </p> */ private String name; /** * <p> * The action to take if the job flow step fails. * </p> */ private String actionOnFailure; /** * <p> * The JAR file used for the job flow step. * </p> */ private HadoopJarStepConfig hadoopJarStep; /** * Default constructor for StepConfig object. Callers should use the setter * or fluent setter (with...) methods to initialize the object after * creating it. */ public StepConfig() { } /** * Constructs a new StepConfig object. Callers should use the setter or * fluent setter (with...) methods to initialize any additional object * members. * * @param name * The name of the job flow step. * @param hadoopJarStep * The JAR file used for the job flow step. */ public StepConfig(String name, HadoopJarStepConfig hadoopJarStep) { setName(name); setHadoopJarStep(hadoopJarStep); } /** * <p> * The name of the job flow step. * </p> * * @param name * The name of the job flow step. */ public void setName(String name) { this.name = name; } /** * <p> * The name of the job flow step. * </p> * * @return The name of the job flow step. */ public String getName() { return this.name; } /** * <p> * The name of the job flow step. 
* </p> * * @param name * The name of the job flow step. * @return Returns a reference to this object so that method calls can be * chained together. */ public StepConfig withName(String name) { setName(name); return this; } /** * <p> * The action to take if the job flow step fails. * </p> * * @param actionOnFailure * The action to take if the job flow step fails. * @see ActionOnFailure */ public void setActionOnFailure(String actionOnFailure) { this.actionOnFailure = actionOnFailure; } /** * <p> * The action to take if the job flow step fails. * </p> * * @return The action to take if the job flow step fails. * @see ActionOnFailure */ public String getActionOnFailure() { return this.actionOnFailure; } /** * <p> * The action to take if the job flow step fails. * </p> * * @param actionOnFailure * The action to take if the job flow step fails. * @return Returns a reference to this object so that method calls can be * chained together. * @see ActionOnFailure */ public StepConfig withActionOnFailure(String actionOnFailure) { setActionOnFailure(actionOnFailure); return this; } /** * <p> * The action to take if the job flow step fails. * </p> * * @param actionOnFailure * The action to take if the job flow step fails. * @return Returns a reference to this object so that method calls can be * chained together. * @see ActionOnFailure */ public void setActionOnFailure(ActionOnFailure actionOnFailure) { this.actionOnFailure = actionOnFailure.toString(); } /** * <p> * The action to take if the job flow step fails. * </p> * * @param actionOnFailure * The action to take if the job flow step fails. * @return Returns a reference to this object so that method calls can be * chained together. * @see ActionOnFailure */ public StepConfig withActionOnFailure(ActionOnFailure actionOnFailure) { setActionOnFailure(actionOnFailure); return this; } /** * <p> * The JAR file used for the job flow step. * </p> * * @param hadoopJarStep * The JAR file used for the job flow step. 
*/ public void setHadoopJarStep(HadoopJarStepConfig hadoopJarStep) { this.hadoopJarStep = hadoopJarStep; } /** * <p> * The JAR file used for the job flow step. * </p> * * @return The JAR file used for the job flow step. */ public HadoopJarStepConfig getHadoopJarStep() { return this.hadoopJarStep; } /** * <p> * The JAR file used for the job flow step. * </p> * * @param hadoopJarStep * The JAR file used for the job flow step. * @return Returns a reference to this object so that method calls can be * chained together. */ public StepConfig withHadoopJarStep(HadoopJarStepConfig hadoopJarStep) { setHadoopJarStep(hadoopJarStep); return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getName() != null) sb.append("Name: " + getName() + ","); if (getActionOnFailure() != null) sb.append("ActionOnFailure: " + getActionOnFailure() + ","); if (getHadoopJarStep() != null) sb.append("HadoopJarStep: " + getHadoopJarStep()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof StepConfig == false) return false; StepConfig other = (StepConfig) obj; if (other.getName() == null ^ this.getName() == null) return false; if (other.getName() != null && other.getName().equals(this.getName()) == false) return false; if (other.getActionOnFailure() == null ^ this.getActionOnFailure() == null) return false; if (other.getActionOnFailure() != null && other.getActionOnFailure().equals(this.getActionOnFailure()) == false) return false; if (other.getHadoopJarStep() == null ^ this.getHadoopJarStep() == null) return false; if (other.getHadoopJarStep() != null && other.getHadoopJarStep().equals(this.getHadoopJarStep()) == false) return false; return true; } 
@Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode()); hashCode = prime * hashCode + ((getActionOnFailure() == null) ? 0 : getActionOnFailure() .hashCode()); hashCode = prime * hashCode + ((getHadoopJarStep() == null) ? 0 : getHadoopJarStep() .hashCode()); return hashCode; } @Override public StepConfig clone() { try { return (StepConfig) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/**
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 * <p/>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.bpmn.analytics.publisher;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.bpmn.analytics.publisher.internal.BPMNAnalyticsHolder;
import org.wso2.carbon.bpmn.analytics.publisher.models.BPMNProcessInstance;
import org.wso2.carbon.bpmn.analytics.publisher.models.BPMNTaskInstance;
import org.wso2.carbon.bpmn.analytics.publisher.utils.AnalyticsPublishServiceUtils;
import org.wso2.carbon.bpmn.analytics.publisher.utils.BPMNDataReceiverConfig;
import org.wso2.carbon.context.CarbonContext;
import org.wso2.carbon.context.PrivilegedCarbonContext;
import org.wso2.carbon.context.RegistryType;
import org.wso2.carbon.databridge.agent.DataPublisher;
import org.wso2.carbon.databridge.agent.exception.DataEndpointAgentConfigurationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointAuthenticationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointConfigurationException;
import org.wso2.carbon.databridge.agent.exception.DataEndpointException;
import org.wso2.carbon.databridge.commons.exception.TransportException;
import org.wso2.carbon.databridge.commons.utils.DataBridgeCommonsUtils;
import org.wso2.carbon.registry.api.Registry;
import org.wso2.carbon.registry.api.RegistryException;

import javax.xml.stream.XMLStreamException;
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.Arrays;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * AnalyticsPublisher uses to publish events to the data receiver in the data-bridge.
 * Two streams are defined. One is for the processes and the other is for the tasks.
 * process_stream(processDefinitionId, processInstanceId, startActivityId, startUserId, startTime, endTime, duration, tenantId)
 * task_stream(taskDefinitionKey, taskInstanceId, processInstanceId, createTime, startTime, endTime, duration, assignee)
 */
public class AnalyticsPublisher {

    private static final Log log = LogFactory.getLog(AnalyticsPublisher.class);

    // Both stream ids are derived from constants in initialize() and are stable afterwards.
    private String processInstanceStreamId;
    private String taskInstanceStreamId;
    // (Re)created lazily by the polling loop; null while publishing is disabled or misconfigured.
    private DataPublisher dataPublisher;
    private AnalyticsPublishServiceUtils analyticsPublishServiceUtils;
    private ExecutorService analyticsExecutorService;

    /**
     * Initialize the publisher objects and start the background polling thread
     * for the current tenant.
     *
     * @param config The data receiver configuration for the current tenant.
     * @return {@code true} when the DAS publisher is NOT activated (nothing was started);
     *         {@code false} after a successful start.
     *         NOTE(review): this return convention looks inverted for an initialize
     *         method — confirm callers' expectations before changing it; behavior
     *         has been kept exactly as before.
     * @throws Exception When reading the configuration or the registry fails.
     */
    public boolean initialize(BPMNDataReceiverConfig config) throws Exception {
        try {
            if (BPMNDataReceiverConfig.isDASPublisherActivated()) {
                analyticsExecutorService = Executors.newSingleThreadExecutor();
                //RegistryUtils.setTrustStoreSystemProperties();
                //dataPublisher = createDataPublisher(config);
                processInstanceStreamId = getProcessStreamId();
                taskInstanceStreamId = getTaskInstanceStreamId();
                analyticsPublishServiceUtils = new AnalyticsPublishServiceUtils();
                int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
                String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain();
                Registry registry =
                        BPMNAnalyticsHolder.getInstance().getRegistryService().getGovernanceSystemRegistry();
                startPollingForInstances(tenantId, tenantDomain, registry, config);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("BPMN Data Publisher is not activated for server.");
                }
                // Nothing to do here.
                return true;
            }
        } catch (IOException | RegistryException | XMLStreamException e) {
            throw new Exception("Data publisher objects initialization error.", e);
        }
        return false;
    }

    /**
     * Submit the polling task to the executor, propagating the tenant's
     * thread-local Carbon context (tenant id/domain and governance registry)
     * to the polling thread.
     */
    private void startPollingForInstances(final int tenantId, final String tenantDomain,
                                          final Registry registry, final BPMNDataReceiverConfig config) {
        if (log.isDebugEnabled()) {
            log.debug("Run startPollingForInstances method... " + tenantId + ", " + tenantDomain + ", " + registry);
        }
        analyticsExecutorService.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    PrivilegedCarbonContext.startTenantFlow();
                    PrivilegedCarbonContext privilegedCarbonContext =
                            PrivilegedCarbonContext.getThreadLocalCarbonContext();
                    privilegedCarbonContext.setTenantId(tenantId, true);
                    privilegedCarbonContext.setTenantDomain(tenantDomain, true);
                    privilegedCarbonContext.setRegistry(RegistryType.SYSTEM_GOVERNANCE, registry);
                    doPollingForInstances(config);
                } finally {
                    PrivilegedCarbonContext.endTenantFlow();
                }
            }
        });
    }

    /**
     * Poll repeatedly for completed process and task instances and publish them.
     * Runs until the polling thread is interrupted (see {@link #stopDataPublisher()}).
     */
    private void doPollingForInstances(BPMNDataReceiverConfig config) {
        if (log.isDebugEnabled()) {
            log.debug("Start polling for process instances...");
        }
        try {
            Thread.sleep(AnalyticsPublisherConstants.DELAY);
            while (true) {
                if (config.isDataPublisherEnabled()) {
                    if (dataPublisher == null) {
                        log.info("Re-initializing data publisher for tenant ID : " + config.getTenantID());
                        dataPublisher = createDataPublisher(config);
                    }
                    // Still the data publisher can be null, due to misconfiguration.
                    if (dataPublisher != null) {
                        BPMNProcessInstance[] bpmnProcessInstances =
                                analyticsPublishServiceUtils.getCompletedProcessInstances();
                        if (log.isDebugEnabled()) {
                            log.debug("publishing data to the receiver urlset:" + config.getReceiverURLsSet());
                        }
                        if (bpmnProcessInstances != null && bpmnProcessInstances.length > 0) {
                            for (BPMNProcessInstance instance : bpmnProcessInstances) {
                                publishBPMNProcessInstanceEvent(instance);
                            }
                        }
                        BPMNTaskInstance[] bpmnTaskInstances =
                                analyticsPublishServiceUtils.getCompletedTaskInstances();
                        if (bpmnTaskInstances != null && bpmnTaskInstances.length > 0) {
                            for (BPMNTaskInstance instance : bpmnTaskInstances) {
                                publishBPMNTaskInstanceEvent(instance);
                            }
                        }
                        Thread.sleep(AnalyticsPublisherConstants.REPEATEDLY_DELAY);
                    } else {
                        log.warn("Can't initialize data publisher for tenant ID : " + config.getTenantID()
                                + ". Next attempt will be in " + AnalyticsPublisherConstants.NEXT_CHECK_DELAY
                                + " ms. Or Disable data publisher for this tenant.");
                        Thread.sleep(AnalyticsPublisherConstants.NEXT_CHECK_DELAY);
                    }
                } else {
                    // Drop the publisher so it is re-created if the tenant re-enables publishing.
                    dataPublisher = null;
                    if (log.isDebugEnabled()) {
                        log.debug("Analytics publisher is disabled for this tenant. Next check will be in "
                                + AnalyticsPublisherConstants.NEXT_CHECK_DELAY + " ms.");
                    }
                    Thread.sleep(AnalyticsPublisherConstants.NEXT_CHECK_DELAY);
                }
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status (previously swallowed) so the executor's
            // shutdownNow() is observed correctly; the loop then terminates.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Publish a completed process instance as an event to the data receiver.
     *
     * @param bpmnProcessInstance BPMN process instance supplying the payload values.
     */
    private void publishBPMNProcessInstanceEvent(BPMNProcessInstance bpmnProcessInstance) {
        Object[] payload = new Object[]{
                bpmnProcessInstance.getProcessDefinitionId(),
                bpmnProcessInstance.getInstanceId(),
                bpmnProcessInstance.getStartActivityId(),
                bpmnProcessInstance.getStartUserId(),
                bpmnProcessInstance.getStartTime().toString(),
                bpmnProcessInstance.getEndTime().toString(),
                bpmnProcessInstance.getDuration(),
                bpmnProcessInstance.getTenantId()
        };
        if (log.isDebugEnabled()) {
            // Arrays.toString: payload.toString() would only log the array's identity hash.
            log.debug("Start to Publish BPMN process instance event... " + Arrays.toString(payload));
        }
        // Use the precomputed stream id (assigned in initialize() and previously unused),
        // consistent with publishBPMNTaskInstanceEvent.
        dataPublisher.tryPublish(processInstanceStreamId, getMeta(), null, payload);
        if (log.isDebugEnabled()) {
            log.debug("Published BPMN process instance event... " + Arrays.toString(payload));
        }
    }

    /**
     * Publish a completed task instance as an event to the data receiver.
     *
     * @param bpmnTaskInstance BPMN task instance supplying the payload values.
     */
    private void publishBPMNTaskInstanceEvent(BPMNTaskInstance bpmnTaskInstance) {
        Object[] payload = new Object[]{
                bpmnTaskInstance.getTaskDefinitionKey(),
                bpmnTaskInstance.getTaskInstanceId(),
                bpmnTaskInstance.getProcessInstanceId(),
                bpmnTaskInstance.getCreateTime().toString(),
                bpmnTaskInstance.getStartTime().toString(),
                bpmnTaskInstance.getEndTime().toString(),
                bpmnTaskInstance.getDurationInMills(),
                bpmnTaskInstance.getAssignee()
        };
        if (log.isDebugEnabled()) {
            log.debug("Start to Publish BPMN task instance event... " + Arrays.toString(payload));
        }
        dataPublisher.tryPublish(taskInstanceStreamId, getMeta(), null, payload);
        if (log.isDebugEnabled()) {
            log.debug("Published BPMN task instance event... " + Arrays.toString(payload));
        }
    }

    // Note: From DAS 3.0.0 onwards, on-the-fly stream definition creation is no longer
    // supported: stream definitions must be created in DAS before publishing. The old
    // defineStream-based helpers (getBPMNProcessInstanceStreamId / getBPMNTaskInstanceStreamId)
    // were dead, commented-out code and have been removed; see version control history.

    /**
     * Create a data publisher to publish the data as events.
     *
     * @param config The tenant's data receiver configuration; may be null.
     * @return A DataPublisher, or {@code null} when the configuration is missing or invalid.
     */
    private DataPublisher createDataPublisher(BPMNDataReceiverConfig config) {
        DataPublisher dataPublisher = null;
        if (config != null) {
            String type = config.getType();
            String receiverURLsSet = config.getReceiverURLsSet();
            String authURLsSet = config.getAuthURLsSet();
            String userName = config.getUserName();
            String password = config.getPassword();
            if (log.isDebugEnabled()) {
                log.debug("BPMNDataReceiverConfig { type :" + type + " , username " + userName
                        + " , receiverURLsSet " + receiverURLsSet + " , authURLsSet " + authURLsSet + " }");
            }
            if (receiverURLsSet != null && userName != null && password != null) {
                try {
                    dataPublisher = new DataPublisher(type, receiverURLsSet, authURLsSet, userName, password);
                } catch (DataEndpointAgentConfigurationException | DataEndpointException
                        | DataEndpointConfigurationException | DataEndpointAuthenticationException
                        | TransportException e) {
                    log.error("Error while creating data publisher. ", e);
                    return null;
                }
            } else {
                log.warn("Unable to create data publisher as one or more required BPMNDataReceiverConfig are not configured properly. "
                        + "Check receiverURLsSet, userName, password fields.");
            }
        } else {
            log.warn("BPMNDataReceiverConfig instance is null. Could not create Data publisher.");
        }
        return dataPublisher;
    }

    /**
     * Stop the polling executor for the current tenant.
     *
     * @return {@code true} when the executor was not running or terminated cleanly,
     *         {@code false} when termination could not be confirmed.
     */
    public boolean stopDataPublisher() {
        int tenantId = CarbonContext.getThreadLocalCarbonContext().getTenantId();
        if (analyticsExecutorService != null && !analyticsExecutorService.isShutdown()) {
            log.info("Shutting down analytics executor service for tenant : " + tenantId);
            analyticsExecutorService.shutdownNow();
            for (int i = 0; i < 5; i++) {
                // isTerminated(): the previous isShutdown() check is always true immediately
                // after shutdownNow(), which made this wait loop dead code.
                if (analyticsExecutorService.isTerminated()) {
                    log.info("Analytics executor service shut down for tenant : " + tenantId);
                    return true;
                } else {
                    try {
                        Thread.sleep(AnalyticsPublisherConstants.REPEATEDLY_DELAY);
                    } catch (InterruptedException e) {
                        // Deliberately keep waiting for the executor to terminate.
                        // NOTE(review): consider restoring the interrupt status before returning.
                    }
                }
            }
        } else {
            log.info("Analytics executor service not running for tenant : " + tenantId);
            return true;
        }
        log.warn("Unable to shutdown analytics executor service for tenant : " + tenantId);
        return false;
    }

    /**
     * Get meta data of the instances to publish them.
     *
     * @return A new, empty object array (no meta attributes are published).
     */
    private Object[] getMeta() {
        return new Object[]{};
    }

    /**
     * Get the StreamId for task instances.
     *
     * @return StreamId
     */
    private String getTaskInstanceStreamId() {
        return DataBridgeCommonsUtils.generateStreamId(AnalyticsPublisherConstants.TASK_STREAM_NAME,
                AnalyticsPublisherConstants.STREAM_VERSION);
    }

    /**
     * Get the StreamId for processes.
     *
     * @return StreamId
     */
    private String getProcessStreamId() {
        return DataBridgeCommonsUtils.generateStreamId(AnalyticsPublisherConstants.PROCESS_STREAM_NAME,
                AnalyticsPublisherConstants.STREAM_VERSION);
    }
}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.persistence.session; import org.drools.compiler.builder.impl.KnowledgeBuilderImpl; import org.drools.core.SessionConfiguration; import org.drools.core.TimerJobFactoryType; import org.drools.core.command.runtime.process.CompleteWorkItemCommand; import org.drools.core.command.runtime.process.GetProcessInstanceCommand; import org.drools.core.command.runtime.process.StartProcessCommand; import org.drools.core.definitions.InternalKnowledgePackage; import org.drools.core.impl.InternalKnowledgeBase; import org.drools.core.impl.KnowledgeBaseFactory; import org.jbpm.process.core.Work; import org.jbpm.process.core.impl.WorkImpl; import org.drools.persistence.PersistableRunner; import org.drools.persistence.jpa.JpaJDKTimerService; import org.drools.persistence.jpa.processinstance.JPAWorkItemManagerFactory; import org.jbpm.compiler.ProcessBuilderImpl; import org.jbpm.persistence.processinstance.JPAProcessInstanceManagerFactory; import org.jbpm.persistence.processinstance.JPASignalManagerFactory; import org.jbpm.persistence.session.objects.TestWorkItemHandler; import org.jbpm.process.core.timer.Timer; import org.jbpm.ruleflow.core.RuleFlowProcess; import org.jbpm.ruleflow.instance.RuleFlowProcessInstance; import org.jbpm.test.util.AbstractBaseTest; import org.jbpm.workflow.core.Node; import org.jbpm.workflow.core.impl.ConnectionImpl; import 
org.jbpm.workflow.core.impl.DroolsConsequenceAction; import org.jbpm.workflow.core.node.ActionNode; import org.jbpm.workflow.core.node.EndNode; import org.jbpm.workflow.core.node.StartNode; import org.jbpm.workflow.core.node.SubProcessNode; import org.jbpm.workflow.core.node.TimerNode; import org.jbpm.workflow.core.node.WorkItemNode; import org.jbpm.workflow.instance.node.SubProcessNodeInstance; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.kie.api.definition.KiePackage; import org.kie.api.runtime.Environment; import org.kie.api.runtime.conf.TimerJobFactoryOption; import org.kie.api.runtime.process.NodeInstance; import org.kie.api.runtime.process.ProcessInstance; import org.kie.api.runtime.process.WorkItem; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.naming.InitialContext; import javax.transaction.UserTransaction; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Properties; import static org.jbpm.test.persistence.util.PersistenceUtil.*; import static org.junit.Assert.*; @RunWith(Parameterized.class) public class SingleSessionCommandServiceTest extends AbstractBaseTest { private static final Logger logger = LoggerFactory.getLogger(SingleSessionCommandServiceTest.class); private HashMap<String, Object> context; private Environment env; public SingleSessionCommandServiceTest(boolean locking) { this.useLocking = locking; } @Parameters public static Collection<Object[]> persistence() { Object[][] data = new Object[][] { { false }, { true } }; return Arrays.asList(data); }; public void setUp() { String testMethodName = Thread.currentThread().getStackTrace()[2].getMethodName(); context = setupWithPoolingDataSource(JBPM_PERSISTENCE_UNIT_NAME); env = createEnvironment(context); } @After public void tearDown() { 
cleanUp(context); } @Test public void testPersistenceWorkItems() throws Exception { setUp(); InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); Collection<KiePackage> kpkgs = getProcessWorkItems(); kbase.addPackages( kpkgs ); Properties properties = new Properties(); properties.setProperty( "drools.commandService", PersistableRunner.class.getName() ); properties.setProperty( "drools.processInstanceManagerFactory", JPAProcessInstanceManagerFactory.class.getName() ); properties.setProperty( "drools.workItemManagerFactory", JPAWorkItemManagerFactory.class.getName() ); properties.setProperty( "drools.processSignalManagerFactory", JPASignalManagerFactory.class.getName() ); properties.setProperty( "drools.timerService", JpaJDKTimerService.class.getName() ); SessionConfiguration config = SessionConfiguration.newInstance( properties ); PersistableRunner service = new PersistableRunner( kbase, config, env ); Long sessionId = service.getSessionId(); StartProcessCommand startProcessCommand = new StartProcessCommand(); startProcessCommand.setProcessId( "org.drools.test.TestProcess" ); ProcessInstance processInstance = service.execute( startProcessCommand ); logger.info( "Started process instance {}", processInstance.getId() ); TestWorkItemHandler handler = TestWorkItemHandler.getInstance(); WorkItem workItem = handler.getWorkItem(); assertNotNull( workItem ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); GetProcessInstanceCommand getProcessInstanceCommand = new GetProcessInstanceCommand(); getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() ); processInstance = service.execute( getProcessInstanceCommand ); assertNotNull( processInstance ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); CompleteWorkItemCommand completeWorkItemCommand = new CompleteWorkItemCommand(); completeWorkItemCommand.setWorkItemId( workItem.getId() ); service.execute( 
completeWorkItemCommand ); workItem = handler.getWorkItem(); assertNotNull( workItem ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); getProcessInstanceCommand = new GetProcessInstanceCommand(); getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() ); processInstance = service.execute( getProcessInstanceCommand ); assertNotNull( processInstance ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); completeWorkItemCommand = new CompleteWorkItemCommand(); completeWorkItemCommand.setWorkItemId( workItem.getId() ); service.execute( completeWorkItemCommand ); workItem = handler.getWorkItem(); assertNotNull( workItem ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); getProcessInstanceCommand = new GetProcessInstanceCommand(); getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() ); processInstance = service.execute( getProcessInstanceCommand ); assertNotNull( processInstance ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); completeWorkItemCommand = new CompleteWorkItemCommand(); completeWorkItemCommand.setWorkItemId( workItem.getId() ); service.execute( completeWorkItemCommand ); workItem = handler.getWorkItem(); assertNull( workItem ); service.dispose(); service = new PersistableRunner( sessionId, kbase, config, env ); getProcessInstanceCommand = new GetProcessInstanceCommand(); getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() ); processInstance = service.execute( getProcessInstanceCommand ); assertNull( processInstance ); service.dispose(); } @Test public void testPersistenceWorkItemsUserTransaction() throws Exception { setUp(); InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase(); Collection<KiePackage> kpkgs = getProcessWorkItems(); kbase.addPackages( kpkgs ); Properties properties = new Properties(); properties.setProperty( 
"drools.commandService", PersistableRunner.class.getName() );
properties.setProperty( "drools.processInstanceManagerFactory", JPAProcessInstanceManagerFactory.class.getName() );
properties.setProperty( "drools.workItemManagerFactory", JPAWorkItemManagerFactory.class.getName() );
properties.setProperty( "drools.processSignalManagerFactory", JPASignalManagerFactory.class.getName() );
properties.setProperty( "drools.timerService", JpaJDKTimerService.class.getName() );
SessionConfiguration config = SessionConfiguration.newInstance( properties );
PersistableRunner service = new PersistableRunner( kbase, config, env );
Long sessionId = service.getSessionId();
// Container-managed JTA transaction from the standard JNDI location.
UserTransaction ut = (UserTransaction) new InitialContext().lookup( "java:comp/UserTransaction" );
// Start the process inside an explicit transaction.
ut.begin();
StartProcessCommand startProcessCommand = new StartProcessCommand();
startProcessCommand.setProcessId( "org.drools.test.TestProcess" );
ProcessInstance processInstance = service.execute( startProcessCommand );
logger.info( "Started process instance {}", processInstance.getId() );
ut.commit();
TestWorkItemHandler handler = TestWorkItemHandler.getInstance();
WorkItem workItem = handler.getWorkItem();
assertNotNull( workItem );
service.dispose();
// Reload the persisted session and verify the instance survived the commit.
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
GetProcessInstanceCommand getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
assertNotNull( processInstance );
ut.commit();
service.dispose();
// Complete work item 1 transactionally; a next work item must appear.
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
CompleteWorkItemCommand completeWorkItemCommand = new CompleteWorkItemCommand();
completeWorkItemCommand.setWorkItemId( workItem.getId() );
service.execute( completeWorkItemCommand );
ut.commit();
workItem = handler.getWorkItem();
assertNotNull( workItem );
service.dispose();
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
ut.commit();
assertNotNull( processInstance );
service.dispose();
// Complete work item 2.
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
completeWorkItemCommand = new CompleteWorkItemCommand();
completeWorkItemCommand.setWorkItemId( workItem.getId() );
service.execute( completeWorkItemCommand );
ut.commit();
workItem = handler.getWorkItem();
assertNotNull( workItem );
service.dispose();
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
ut.commit();
assertNotNull( processInstance );
service.dispose();
// Complete the last work item: no further work item may be produced.
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
completeWorkItemCommand = new CompleteWorkItemCommand();
completeWorkItemCommand.setWorkItemId( workItem.getId() );
service.execute( completeWorkItemCommand );
ut.commit();
workItem = handler.getWorkItem();
assertNull( workItem );
service.dispose();
// The process has completed, so it must be gone from persistence.
service = new PersistableRunner( sessionId, kbase, config, env );
ut.begin();
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
ut.commit();
assertNull( processInstance );
service.dispose();
}

/**
 * Builds "org.drools.test.TestProcess": Start -> Action -> WorkItem1 ->
 * WorkItem2 -> WorkItem3 -> End (all work items use work name "MyWork"),
 * compiles it, and returns the resulting knowledge packages.
 */
private Collection<KiePackage> getProcessWorkItems() {
RuleFlowProcess process = new RuleFlowProcess();
process.setId( "org.drools.test.TestProcess" );
process.setName( "TestProcess" );
process.setPackageName( "org.drools.test" );
StartNode start = new StartNode();
start.setId( 1 );
start.setName( "Start" );
process.addNode( start );
ActionNode actionNode =
new ActionNode();
actionNode.setId( 2 );
actionNode.setName( "Action" );
DroolsConsequenceAction action = new DroolsConsequenceAction();
action.setDialect( "java" );
action.setConsequence( "System.out.println(\"Executed action\");" );
actionNode.setAction( action );
process.addNode( actionNode );
// ConnectionImpl wires itself into both nodes on construction, hence the
// apparently unused "new" expressions.
new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE );
WorkItemNode workItemNode = new WorkItemNode();
workItemNode.setId( 3 );
workItemNode.setName( "WorkItem1" );
Work work = new WorkImpl();
work.setName( "MyWork" );
workItemNode.setWork( work );
process.addNode( workItemNode );
new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode, Node.CONNECTION_DEFAULT_TYPE );
WorkItemNode workItemNode2 = new WorkItemNode();
workItemNode2.setId( 4 );
workItemNode2.setName( "WorkItem2" );
work = new WorkImpl();
work.setName( "MyWork" );
workItemNode2.setWork( work );
process.addNode( workItemNode2 );
new ConnectionImpl( workItemNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode2, Node.CONNECTION_DEFAULT_TYPE );
WorkItemNode workItemNode3 = new WorkItemNode();
workItemNode3.setId( 5 );
workItemNode3.setName( "WorkItem3" );
work = new WorkImpl();
work.setName( "MyWork" );
workItemNode3.setWork( work );
process.addNode( workItemNode3 );
new ConnectionImpl( workItemNode2, Node.CONNECTION_DEFAULT_TYPE, workItemNode3, Node.CONNECTION_DEFAULT_TYPE );
EndNode end = new EndNode();
end.setId( 6 );
end.setName( "End" );
process.addNode( end );
new ConnectionImpl( workItemNode3, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE );
// Compile the process definition into knowledge packages.
KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl();
ProcessBuilderImpl processBuilder = new ProcessBuilderImpl( packageBuilder );
processBuilder.buildProcess( process, null );
List<KiePackage> list = new ArrayList<KiePackage>();
list.addAll( packageBuilder.getKnowledgePackages() );
return list;
}

/**
 * Verifies that a parent process and its spawned sub-process are both
 * persisted across runner restarts, and that both disappear once the
 * sub-process' work item is completed.
 */
@Test
public void testPersistenceSubProcess() {
setUp();
Properties properties =
new Properties();
// JPA-backed persistence configuration (same as the other tests).
properties.setProperty( "drools.commandService", PersistableRunner.class.getName() );
properties.setProperty( "drools.processInstanceManagerFactory", JPAProcessInstanceManagerFactory.class.getName() );
properties.setProperty( "drools.workItemManagerFactory", JPAWorkItemManagerFactory.class.getName() );
properties.setProperty( "drools.processSignalManagerFactory", JPASignalManagerFactory.class.getName() );
properties.setProperty( "drools.timerService", JpaJDKTimerService.class.getName() );
SessionConfiguration config = SessionConfiguration.newInstance( properties );
InternalKnowledgeBase ruleBase = KnowledgeBaseFactory.newKnowledgeBase();
KiePackage pkg = getProcessSubProcess();
// Raw cast mirrors the addPackages(Collection) signature.
ruleBase.addPackages( (Collection) Arrays.asList(pkg) );
PersistableRunner service = new PersistableRunner( ruleBase, config, env );
Long sessionId = service.getSessionId();
StartProcessCommand startProcessCommand = new StartProcessCommand();
startProcessCommand.setProcessId( "org.drools.test.TestProcess" );
RuleFlowProcessInstance processInstance = (RuleFlowProcessInstance) service.execute( startProcessCommand );
logger.info( "Started process instance {}", processInstance.getId() );
long processInstanceId = processInstance.getId();
TestWorkItemHandler handler = TestWorkItemHandler.getInstance();
WorkItem workItem = handler.getWorkItem();
assertNotNull( workItem );
service.dispose();
// Reload and confirm the parent is waiting in exactly one node: the sub-process.
service = new PersistableRunner( sessionId, ruleBase, config, env );
GetProcessInstanceCommand getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstanceId );
processInstance = (RuleFlowProcessInstance) service.execute( getProcessInstanceCommand );
assertNotNull( processInstance );
Collection<NodeInstance> nodeInstances = processInstance.getNodeInstances();
assertEquals( 1, nodeInstances.size() );
SubProcessNodeInstance subProcessNodeInstance = (SubProcessNodeInstance) nodeInstances.iterator().next();
long subProcessInstanceId =
subProcessNodeInstance.getProcessInstanceId();
// The spawned sub-process instance must itself be persisted.
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( subProcessInstanceId );
RuleFlowProcessInstance subProcessInstance = (RuleFlowProcessInstance) service.execute( getProcessInstanceCommand );
assertNotNull( subProcessInstance );
service.dispose();
// Complete the sub-process' only work item after a reload.
service = new PersistableRunner( sessionId, ruleBase, config, env );
CompleteWorkItemCommand completeWorkItemCommand = new CompleteWorkItemCommand();
completeWorkItemCommand.setWorkItemId( workItem.getId() );
service.execute( completeWorkItemCommand );
service.dispose();
// Both the sub-process and its parent must now be finished and removed.
service = new PersistableRunner( sessionId, ruleBase, config, env );
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( subProcessInstanceId );
subProcessInstance = (RuleFlowProcessInstance) service.execute( getProcessInstanceCommand );
assertNull( subProcessInstance );
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstanceId );
processInstance = (RuleFlowProcessInstance) service.execute( getProcessInstanceCommand );
assertNull( processInstance );
service.dispose();
}

/**
 * Builds a parent process ("org.drools.test.TestProcess": Start -> Action ->
 * SubProcess -> End) plus the sub-process it calls
 * ("org.drools.test.SubProcess": Start -> Action -> WorkItem1 -> End),
 * and returns the package containing both.
 */
private InternalKnowledgePackage getProcessSubProcess() {
RuleFlowProcess process = new RuleFlowProcess();
process.setId( "org.drools.test.TestProcess" );
process.setName( "TestProcess" );
process.setPackageName( "org.drools.test" );
StartNode start = new StartNode();
start.setId( 1 );
start.setName( "Start" );
process.addNode( start );
ActionNode actionNode = new ActionNode();
actionNode.setId( 2 );
actionNode.setName( "Action" );
DroolsConsequenceAction action = new DroolsConsequenceAction();
action.setDialect( "java" );
action.setConsequence( "System.out.println(\"Executed action\");" );
actionNode.setAction( action );
process.addNode( actionNode );
new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE );
SubProcessNode subProcessNode = new SubProcessNode();
subProcessNode.setId( 3 );
subProcessNode.setName( "SubProcess" );
subProcessNode.setProcessId( "org.drools.test.SubProcess" );
process.addNode( subProcessNode );
new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, subProcessNode, Node.CONNECTION_DEFAULT_TYPE );
EndNode end = new EndNode();
end.setId( 4 );
end.setName( "End" );
process.addNode( end );
new ConnectionImpl( subProcessNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE );
KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl();
ProcessBuilderImpl processBuilder = new ProcessBuilderImpl( packageBuilder );
processBuilder.buildProcess( process, null );
// Now build the called sub-process into the same package builder.
process = new RuleFlowProcess();
process.setId( "org.drools.test.SubProcess" );
process.setName( "SubProcess" );
process.setPackageName( "org.drools.test" );
start = new StartNode();
start.setId( 1 );
start.setName( "Start" );
process.addNode( start );
actionNode = new ActionNode();
actionNode.setId( 2 );
actionNode.setName( "Action" );
action = new DroolsConsequenceAction();
action.setDialect( "java" );
action.setConsequence( "System.out.println(\"Executed action\");" );
actionNode.setAction( action );
process.addNode( actionNode );
new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE );
WorkItemNode workItemNode = new WorkItemNode();
workItemNode.setId( 3 );
workItemNode.setName( "WorkItem1" );
Work work = new WorkImpl();
work.setName( "MyWork" );
workItemNode.setWork( work );
process.addNode( workItemNode );
new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, workItemNode, Node.CONNECTION_DEFAULT_TYPE );
end = new EndNode();
end.setId( 6 );
end.setName( "End" );
process.addNode( end );
new ConnectionImpl( workItemNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE );
processBuilder.buildProcess( process, null );
return packageBuilder.getPackage("org.drools.test");
}

/**
 * Verifies that a process waiting on a 2s timer survives a runner restart,
 * and completes (and is removed) once the timer has had time to fire.
 * NOTE(review): relies on Thread.sleep timing and may be flaky on slow hosts.
 */
@Test
public void
testPersistenceTimer() throws Exception {
setUp();
Properties properties = new Properties();
properties.setProperty( "drools.commandService", PersistableRunner.class.getName() );
properties.setProperty( "drools.processInstanceManagerFactory", JPAProcessInstanceManagerFactory.class.getName() );
properties.setProperty( "drools.workItemManagerFactory", JPAWorkItemManagerFactory.class.getName() );
properties.setProperty( "drools.processSignalManagerFactory", JPASignalManagerFactory.class.getName() );
SessionConfiguration config = SessionConfiguration.newInstance( properties );
// JPA-backed timer jobs so the pending timer survives dispose/reload.
config.setOption( TimerJobFactoryOption.get(TimerJobFactoryType.JPA.getId()) );
InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
Collection<KiePackage> kpkgs = getProcessTimer();
kbase.addPackages( kpkgs );
PersistableRunner service = new PersistableRunner( kbase, config, env );
Long sessionId = service.getSessionId();
StartProcessCommand startProcessCommand = new StartProcessCommand();
startProcessCommand.setProcessId( "org.drools.test.TestProcess" );
ProcessInstance processInstance = service.execute( startProcessCommand );
logger.info( "Started process instance {}", processInstance.getId() );
// 500ms < 2000ms delay: the timer must NOT have fired yet.
Thread.sleep( 500 );
service.dispose();
service = new PersistableRunner( sessionId, kbase, config, env );
GetProcessInstanceCommand getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
assertNotNull( processInstance );
service.dispose();
// After a reload plus 5s the 2s timer must have fired and finished the process.
service = new PersistableRunner( sessionId, kbase, config, env );
Thread.sleep( 5000 );
getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
assertNull( processInstance );
}

/**
 * Builds "org.drools.test.TestProcess": Start -> Timer(2000ms delay) ->
 * Action -> End, compiled into knowledge packages.
 */
private List<KiePackage> getProcessTimer() {
RuleFlowProcess process = new
RuleFlowProcess();
process.setId( "org.drools.test.TestProcess" );
process.setName( "TestProcess" );
process.setPackageName( "org.drools.test" );
StartNode start = new StartNode();
start.setId( 1 );
start.setName( "Start" );
process.addNode( start );
TimerNode timerNode = new TimerNode();
timerNode.setId( 2 );
timerNode.setName( "Timer" );
Timer timer = new Timer();
timer.setDelay( "2000" );
timerNode.setTimer( timer );
process.addNode( timerNode );
new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, timerNode, Node.CONNECTION_DEFAULT_TYPE );
ActionNode actionNode = new ActionNode();
actionNode.setId( 3 );
actionNode.setName( "Action" );
DroolsConsequenceAction action = new DroolsConsequenceAction();
action.setDialect( "java" );
action.setConsequence( "System.out.println(\"Executed action\");" );
actionNode.setAction( action );
process.addNode( actionNode );
new ConnectionImpl( timerNode, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE );
EndNode end = new EndNode();
end.setId( 6 );
end.setName( "End" );
process.addNode( end );
new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE );
KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl();
ProcessBuilderImpl processBuilder = new ProcessBuilderImpl( packageBuilder );
processBuilder.buildProcess( process, null );
return Arrays.asList(packageBuilder.getPackages());
}

/**
 * Verifies that a zero-delay timer followed by a slow (1s) action still
 * completes, so that after 2s the instance is gone from persistence.
 * NOTE(review): timing-based; may be flaky on slow hosts.
 */
@Test
public void testPersistenceTimer2() throws Exception {
setUp();
Properties properties = new Properties();
properties.setProperty( "drools.commandService", PersistableRunner.class.getName() );
properties.setProperty( "drools.processInstanceManagerFactory", JPAProcessInstanceManagerFactory.class.getName() );
properties.setProperty( "drools.workItemManagerFactory", JPAWorkItemManagerFactory.class.getName() );
properties.setProperty( "drools.processSignalManagerFactory", JPASignalManagerFactory.class.getName() );
SessionConfiguration config = SessionConfiguration.newInstance(
properties );
config.setOption( TimerJobFactoryOption.get(TimerJobFactoryType.JPA.getId()) );
InternalKnowledgeBase kbase = KnowledgeBaseFactory.newKnowledgeBase();
Collection<KiePackage> kpkgs = getProcessTimer2();
kbase.addPackages( kpkgs );
PersistableRunner service = new PersistableRunner( kbase, config, env );
Long sessionId = service.getSessionId();
StartProcessCommand startProcessCommand = new StartProcessCommand();
startProcessCommand.setProcessId( "org.drools.test.TestProcess" );
ProcessInstance processInstance = service.execute( startProcessCommand );
logger.info( "Started process instance {}", processInstance.getId() );
// Give the 0-delay timer + 1s action time to complete.
Thread.sleep( 2000 );
// NOTE(review): unlike the other tests, the first runner is not disposed
// before re-creating one from the same session id — confirm intentional.
service = new PersistableRunner( sessionId, kbase, config, env );
GetProcessInstanceCommand getProcessInstanceCommand = new GetProcessInstanceCommand();
getProcessInstanceCommand.setProcessInstanceId( processInstance.getId() );
processInstance = service.execute( getProcessInstanceCommand );
assertNull( processInstance );
}

/**
 * Builds "org.drools.test.TestProcess": Start -> Timer(0ms delay) ->
 * Action (sleeps 1s, swallowing interrupts) -> End.
 */
private List<KiePackage> getProcessTimer2() {
RuleFlowProcess process = new RuleFlowProcess();
process.setId( "org.drools.test.TestProcess" );
process.setName( "TestProcess" );
process.setPackageName( "org.drools.test" );
StartNode start = new StartNode();
start.setId( 1 );
start.setName( "Start" );
process.addNode( start );
TimerNode timerNode = new TimerNode();
timerNode.setId( 2 );
timerNode.setName( "Timer" );
Timer timer = new Timer();
timer.setDelay( "0" );
timerNode.setTimer( timer );
process.addNode( timerNode );
new ConnectionImpl( start, Node.CONNECTION_DEFAULT_TYPE, timerNode, Node.CONNECTION_DEFAULT_TYPE );
ActionNode actionNode = new ActionNode();
actionNode.setId( 3 );
actionNode.setName( "Action" );
DroolsConsequenceAction action = new DroolsConsequenceAction();
action.setDialect( "java" );
action.setConsequence( "try { Thread.sleep(1000); } catch (Throwable t) {} System.out.println(\"Executed action\");" );
actionNode.setAction( action );
process.addNode( actionNode );
new
ConnectionImpl( timerNode, Node.CONNECTION_DEFAULT_TYPE, actionNode, Node.CONNECTION_DEFAULT_TYPE );
EndNode end = new EndNode();
end.setId( 6 );
end.setName( "End" );
process.addNode( end );
new ConnectionImpl( actionNode, Node.CONNECTION_DEFAULT_TYPE, end, Node.CONNECTION_DEFAULT_TYPE );
// Compile the process definition into knowledge packages.
KnowledgeBuilderImpl packageBuilder = new KnowledgeBuilderImpl();
ProcessBuilderImpl processBuilder = new ProcessBuilderImpl( packageBuilder );
processBuilder.buildProcess( process, null );
return Arrays.asList(packageBuilder.getPackages());
}
}
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * @author max
 */
package com.intellij.util.io;

import com.intellij.openapi.Forceable;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.ThrowableNotNullFunction;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.util.ExceptionUtil;
import com.intellij.util.SmartList;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.lang.CompoundRuntimeException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.io.DataInputStream;
import java.io.DataOutputStream; // FIX: was missing, but writeLength() constructs a DataOutputStream
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.List;

/**
 * A growable wrapper around {@link PagedFileStorage} that tracks a "logical"
 * length separately from the physical (page-rounded) file size.
 * <p>
 * The logical length is persisted as a single {@code long} in a companion
 * file named {@code <storage-file-name>.len} next to the storage file, so
 * that it survives restarts even though the physical file is over-allocated.
 * Physical growth is geometric (~1.625x) and rounded to
 * {@link #DEFAULT_ALLOCATION_ROUND_FACTOR} (or a custom round factor).
 */
public class ResizeableMappedFile implements Forceable {
  private static final Logger LOG = Logger.getInstance(ResizeableMappedFile.class);

  // When enabled, close() shrinks the physical file back to the logical size.
  private static final boolean truncateOnClose =
    SystemProperties.getBooleanProperty("idea.resizeable.file.truncate.on.close", false);

  private volatile long myLogicalSize;
  // Last logical size flushed to the ".len" file; used to skip redundant writes.
  private volatile long myLastWrittenLogicalSize;
  private final PagedFileStorage myStorage;
  private final int myInitialSize;

  static final int DEFAULT_ALLOCATION_ROUND_FACTOR = 4096;
  private int myRoundFactor = DEFAULT_ALLOCATION_ROUND_FACTOR;

  public ResizeableMappedFile(@NotNull Path file,
                              int initialSize,
                              @Nullable StorageLockContext lockContext,
                              int pageSize,
                              boolean valuesAreBufferAligned) throws IOException {
    this(file, initialSize, lockContext, pageSize, valuesAreBufferAligned, false);
  }

  public ResizeableMappedFile(@NotNull Path file,
                              int initialSize,
                              @Nullable StorageLockContext lockContext,
                              int pageSize,
                              boolean valuesAreBufferAligned,
                              boolean nativeBytesOrder) throws IOException {
    myStorage = new PagedFileStorage(file, lockContext, pageSize, valuesAreBufferAligned, nativeBytesOrder);
    ensureParentDirectoryExists();
    myInitialSize = initialSize;
    myLastWrittenLogicalSize = myLogicalSize = readLength();
  }

  /** Truncates the storage and resets the logical length to zero. */
  public void clear() throws IOException {
    myStorage.resize(0);
    myLogicalSize = 0;
    myLastWrittenLogicalSize = 0;
  }

  /** @return the logical length, which may be smaller than the physical file size */
  public long length() {
    return myLogicalSize;
  }

  private long realSize() {
    return myStorage.length();
  }

  /** Grows the logical length to at least {@code pos} and expands physical storage if needed. */
  void ensureSize(final long pos) {
    myLogicalSize = Math.max(pos, myLogicalSize);
    expand(pos);
  }

  public void setRoundFactor(int roundFactor) {
    myRoundFactor = roundFactor;
  }

  /** Grows physical storage geometrically until it covers {@code max}. */
  private void expand(final long max) {
    long realSize = realSize();
    if (max <= realSize) return;
    long suggestedSize;
    if (realSize == 0) {
      suggestedSize = doRoundToFactor(Math.max(myInitialSize, max));
    }
    else {
      suggestedSize = Math.max(realSize + 1, 2);
      // suggestedSize should increase with int multiplication on 1.625 factor (x * 13 >> 3)
      while (max > suggestedSize) {
        long newSuggestedSize = suggestedSize * 13 >> 3;
        if (newSuggestedSize >= Integer.MAX_VALUE) {
          // fall back to a gentler +20% growth near the int range boundary
          suggestedSize += suggestedSize / 5;
        }
        else {
          suggestedSize = newSuggestedSize;
        }
      }
      suggestedSize = doRoundToFactor(suggestedSize);
    }
    try {
      myStorage.resize(suggestedSize);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  /** Rounds {@code suggestedSize} up to the next multiple of the round factor. */
  private long doRoundToFactor(long suggestedSize) {
    int roundFactor = myRoundFactor;
    if (suggestedSize % roundFactor != 0) {
      suggestedSize = (suggestedSize / roundFactor + 1) * roundFactor;
    }
    return suggestedSize;
  }

  /** @return the companion file holding the persisted logical length */
  private Path getLengthFile() {
    Path file = myStorage.getFile();
    return file.resolveSibling(file.getFileName() + ".len");
  }

  /**
   * Persists {@code len} into the ".len" file. Retries via
   * {@link FileUtilRt#doIOOperation} after creating the parent directory if
   * it is missing; failures are logged rather than propagated.
   */
  private void writeLength(final long len) {
    final Path lengthFile = getLengthFile();
    try (DataOutputStream stream = FileUtilRt.doIOOperation(lastAttempt -> {
      try {
        return new DataOutputStream(Files.newOutputStream(lengthFile));
      }
      catch (NoSuchFileException ex) {
        ensureParentDirectoryExists();
        if (!lastAttempt) return null; // null => doIOOperation retries
        throw ex;
      }
    })) {
      if (stream != null) {
        stream.writeLong(len);
      }
    }
    catch (IOException e) {
      LOG.error(e);
    }
  }

  @Override
  public boolean isDirty() {
    return myStorage.isDirty();
  }

  @Override
  public void force() throws IOException {
    ensureLengthWritten();
    myStorage.force();
  }

  private void ensureLengthWritten() {
    if (myLastWrittenLogicalSize != myLogicalSize) {
      writeLength(myLogicalSize);
      myLastWrittenLogicalSize = myLogicalSize;
    }
  }

  private void ensureParentDirectoryExists() throws IOException {
    Path parent = getLengthFile().getParent();
    if (!Files.exists(parent)) {
      Files.createDirectories(parent);
    }
  }

  /**
   * Reads the persisted logical length. If neither the ".len" file nor any
   * storage data exists, initializes the length to zero. On a corrupt/unreadable
   * ".len" file, falls back to the physical size (logging the error).
   */
  private long readLength() throws IOException {
    Path lengthFile = getLengthFile();
    long zero = 0L;
    if (!Files.exists(lengthFile) && (!Files.exists(myStorage.getFile()) || Files.size(myStorage.getFile()) == zero)) {
      writeLength(zero);
      return zero;
    }
    try (DataInputStream stream = new DataInputStream(Files.newInputStream(lengthFile, StandardOpenOption.READ))) {
      return stream.readLong();
    }
    catch (IOException e) {
      long realSize = realSize();
      writeLength(realSize);
      LOG.error("storage size = " + realSize + ", file size = " + Files.size(myStorage.getFile()), e);
      return realSize;
    }
  }

  public int getInt(long index) throws IOException {
    return myStorage.getInt(index);
  }

  public void putInt(long index, int value) throws IOException {
    ensureSize(index + 4);
    myStorage.putInt(index, value);
  }

  public long getLong(long index) throws IOException {
    return myStorage.getLong(index);
  }

  public void putLong(long index, long value) throws IOException {
    ensureSize(index + 8);
    myStorage.putLong(index, value);
  }

  public byte get(long index) throws IOException {
    return myStorage.get(index);
  }

  public void get(long index, byte[] dst, int offset, int length) throws IOException {
    myStorage.get(index, dst, offset, length);
  }

  public void put(long index, byte[] src, int offset, int length) throws IOException {
    ensureSize(index + length);
    myStorage.put(index, src, offset, length);
  }

  /**
   * Flushes the logical length (unless the file system is read-only),
   * forces the storage, optionally truncates to the logical size, and closes
   * the storage. All failures are collected so the storage is always closed;
   * they are rethrown wrapped in a single {@link IOException}.
   */
  public void close() throws IOException {
    List<Exception> exceptions = new SmartList<>();
    ContainerUtil.addIfNotNull(exceptions, ExceptionUtil.runAndCatch(() -> {
      if (!getLengthFile().getFileSystem().isReadOnly()) {
        writeLength(myLogicalSize);
      }
      else {
        assert myLogicalSize == myLastWrittenLogicalSize;
      }
      myStorage.force();
      if (truncateOnClose && myLogicalSize < myStorage.length()) {
        myStorage.resize(myLogicalSize);
      }
    }));
    ContainerUtil.addIfNotNull(exceptions, ExceptionUtil.runAndCatch(() -> myStorage.close()));
    if (!exceptions.isEmpty()) {
      throw new IOException(new CompoundRuntimeException(exceptions));
    }
  }

  @NotNull
  public PagedFileStorage getPagedFileStorage() {
    return myStorage;
  }

  @NotNull
  public StorageLockContext getStorageLockContext() {
    return myStorage.getStorageLockContext();
  }

  public <R> @NotNull R readInputStream(@NotNull ThrowableNotNullFunction<? super InputStream, R, ? extends IOException> consumer) throws IOException {
    return myStorage.readInputStream(consumer);
  }

  public void lockRead() {
    myStorage.lockRead();
  }

  public void unlockRead() {
    myStorage.unlockRead();
  }

  public void lockWrite() {
    myStorage.lockWrite();
  }

  public void unlockWrite() {
    myStorage.unlockWrite();
  }
}
/* * * Paros and its related class files. * * Paros is an HTTP/HTTPS proxy for assessing web application security. * Copyright (C) 2003-2004 Chinotec Technologies Company * * This program is free software; you can redistribute it and/or * modify it under the terms of the Clarified Artistic License * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * Clarified Artistic License for more details. * * You should have received a copy of the Clarified Artistic License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ // ZAP: 2012/03/15 Removed the options to change the display of the ManualRequestEditorDialog, // now they are changed dynamically. // ZAP: 2012/04/25 Added @Override annotation to all appropriate methods. // ZAP: 2013/12/13 Added support for a new option 'show tab names'. 
// ZAP: 2014/04/25 Issue 642: Add timestamps to Output tab(s)
// ZAP: 2014/10/09 Issue 1359: Options for splash screen
// ZAP: 2014/12/16 Issue 1466: Config option for 'large display' size
package org.parosproxy.paros.extension.option;

import java.awt.CardLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Font;
import java.awt.GraphicsEnvironment;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;

import javax.swing.BorderFactory;
import javax.swing.DefaultListCellRenderer;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.plaf.basic.BasicComboBoxRenderer;

import org.parosproxy.paros.Constant;
import org.parosproxy.paros.model.Model;
import org.parosproxy.paros.model.OptionsParam;
import org.parosproxy.paros.view.AbstractParamPanel;
import org.zaproxy.zap.extension.httppanel.view.largerequest.LargeRequestUtil;
import org.zaproxy.zap.extension.httppanel.view.largeresponse.LargeResponseUtil;
import org.zaproxy.zap.utils.FontUtils;
import org.zaproxy.zap.utils.TimeStampUtils;
import org.zaproxy.zap.utils.ZapNumberSpinner;
import org.zaproxy.zap.view.LayoutHelper;

// ZAP: 2011: added more configuration options

/**
 * Options panel for ZAP's "Display" settings: UI layout, break panel view,
 * large request/response thresholds, toolbar/tab visibility, output-tab
 * timestamps, font, and image scaling.
 */
public class OptionsViewPanel extends AbstractParamPanel {

private static final long serialVersionUID = 1L;

// Localized timestamp-format strings for the output tab combo box.
private static final String TIME_STAMP_FORMAT_COMBOBOX_TOOL_TIP =
Constant.messages.getString("options.display.timestamp.format.combobox.tooltip");
private static final String TIME_STAMP_FORMAT_DATETIME =
Constant.messages.getString("timestamp.format.datetime");
private static final String TIME_STAMP_FORMAT_ISO8601 =
Constant.messages.getString("timestamp.format.iso8601");
private static final String TIME_STAMP_FORMAT_TIMEONLY =
Constant.messages.getString("timestamp.format.timeonly");

// Lazily-created components (see the getXxx() accessors below).
private JPanel panelMisc = null;
private JCheckBox chkShowTabNames = null;
private JCheckBox chkProcessImages = null;
private JCheckBox chkShowMainToolbar = null;
private JCheckBox chkAdvancedView = null;
private JCheckBox chkAskOnExit = null;
private JCheckBox chkWmUiHandling = null;
private JCheckBox chkOutputTabTimeStamping = null;
private JCheckBox chkShowSplashScreen = null;
private JCheckBox scaleImages = null;
private JComboBox<String> brkPanelViewSelect = null;
private JComboBox<String> displaySelect = null;
private JComboBox<String> timeStampsFormatSelect = null;
private JComboBox<String> fontName = null;
private ZapNumberSpinner largeRequestSize = null;
private ZapNumberSpinner largeResponseSize = null;
private ZapNumberSpinner fontSize = null;
private JLabel brkPanelViewLabel = null;
private JLabel advancedViewLabel = null;
private JLabel wmUiHandlingLabel = null;
private JLabel askOnExitLabel = null;
private JLabel displayLabel = null;
private JLabel showMainToolbarLabel = null;
private JLabel processImagesLabel = null;
private JLabel showTabNamesLabel = null;
private JLabel outputTabTimeStampLabel = null;
private JLabel outputTabTimeStampExampleLabel = null;
private JLabel showSplashScreenLabel = null;
private JLabel largeRequestLabel = null;
private JLabel largeResponseLabel = null;
private JLabel fontExampleLabel = null;

public OptionsViewPanel() {
super();
initialize();
}

/**
 * Initializes this panel: card layout, localized title, and the single
 * "misc" card.
 */
private void initialize() {
this.setLayout(new CardLayout());
this.setName(Constant.messages.getString("view.options.title"));
this.add(getPanelMisc(), getPanelMisc().getName());
}

/**
 * Lazily builds the main options panel (GridBag layout, one option per row).
 *
 * @return javax.swing.JPanel
 */
private JPanel getPanelMisc() {
if (panelMisc == null) {
panelMisc = new JPanel();
panelMisc.setLayout(new GridBagLayout());
// NOTE(review): purpose of sizing the panel only when the WM-UI-handling
// option is 0 is not evident from this file — confirm before changing.
if (Model.getSingleton().getOptionsParam().getViewParam().getWmUiHandlingOption() == 0) {
panelMisc.setSize(114,
132);
}
panelMisc.setName(Constant.messages.getString("view.options.misc.title"));
// Create the localized row labels.
displayLabel = new JLabel(Constant.messages.getString("view.options.label.display"));
brkPanelViewLabel = new JLabel(Constant.messages.getString("view.options.label.brkPanelView"));
advancedViewLabel = new JLabel(Constant.messages.getString("view.options.label.advancedview"));
wmUiHandlingLabel = new JLabel(Constant.messages.getString("view.options.label.wmuihandler"));
askOnExitLabel = new JLabel(Constant.messages.getString("view.options.label.askonexit"));
showMainToolbarLabel = new JLabel(Constant.messages.getString("view.options.label.showMainToolbar"));
processImagesLabel = new JLabel(Constant.messages.getString("view.options.label.processImages"));
showTabNamesLabel = new JLabel(Constant.messages.getString("view.options.label.showTabNames"));
outputTabTimeStampLabel = new JLabel(Constant.messages.getString("options.display.timestamp.format.outputtabtimestamps.label"));
largeRequestLabel = new JLabel(Constant.messages.getString("view.options.label.largeRequestSize"));
largeResponseLabel = new JLabel(Constant.messages.getString("view.options.label.largeResponseSize"));
outputTabTimeStampExampleLabel = new JLabel(TimeStampUtils.currentDefaultFormattedTimeStamp());
showSplashScreenLabel = new JLabel(Constant.messages.getString("view.options.label.showSplashScreen"));
// Rows 0-12: label in column 0, editor component in column 1.
displayLabel.setLabelFor(getDisplaySelect());
panelMisc.add(displayLabel, LayoutHelper.getGBC(0, 0, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getDisplaySelect(), LayoutHelper.getGBC(1, 0, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
brkPanelViewLabel.setLabelFor(getBrkPanelViewSelect());
panelMisc.add(brkPanelViewLabel, LayoutHelper.getGBC(0, 1, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getBrkPanelViewSelect(), LayoutHelper.getGBC(1, 1, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
largeRequestLabel.setLabelFor(getLargeRequestSize());
panelMisc.add(largeRequestLabel, LayoutHelper.getGBC(0, 2, 1, 1.0D,
new java.awt.Insets(2,2,2,2)));
panelMisc.add(getLargeRequestSize(), LayoutHelper.getGBC(1, 2, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
largeResponseLabel.setLabelFor(getLargeResponseSize());
panelMisc.add(largeResponseLabel, LayoutHelper.getGBC(0, 3, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getLargeResponseSize(), LayoutHelper.getGBC(1, 3, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
advancedViewLabel.setLabelFor(getChkAdvancedView());
panelMisc.add(advancedViewLabel, LayoutHelper.getGBC(0, 4, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkAdvancedView(), LayoutHelper.getGBC(1, 4, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
wmUiHandlingLabel.setLabelFor(getChkWmUiHandling());
panelMisc.add(wmUiHandlingLabel, LayoutHelper.getGBC(0, 5, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkWmUiHandling(), LayoutHelper.getGBC(1, 5, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
askOnExitLabel.setLabelFor(getChkAskOnExit());
panelMisc.add(askOnExitLabel, LayoutHelper.getGBC(0, 6, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkAskOnExit(), LayoutHelper.getGBC(1, 6, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
showMainToolbarLabel.setLabelFor(getChkShowMainToolbar());
panelMisc.add(showMainToolbarLabel, LayoutHelper.getGBC(0, 7, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkShowMainToolbar(), LayoutHelper.getGBC(1, 7, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
processImagesLabel.setLabelFor(getChkProcessImages());
panelMisc.add(processImagesLabel, LayoutHelper.getGBC(0, 8, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkProcessImages(), LayoutHelper.getGBC(1, 8, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
showTabNamesLabel.setLabelFor(getShowTabNames());
panelMisc.add(showTabNamesLabel, LayoutHelper.getGBC(0, 9, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getShowTabNames(), LayoutHelper.getGBC(1, 9, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
showSplashScreenLabel.setLabelFor(getShowSplashScreen());
panelMisc.add(showSplashScreenLabel, LayoutHelper.getGBC(0, 10, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getShowSplashScreen(), LayoutHelper.getGBC(1, 10, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
outputTabTimeStampLabel.setLabelFor(getChkOutputTabTimeStamps());
panelMisc.add(outputTabTimeStampLabel, LayoutHelper.getGBC(0, 11, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getChkOutputTabTimeStamps(), LayoutHelper.getGBC(1, 11, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
// Row 12: format selector in column 0, live example in column 1.
outputTabTimeStampExampleLabel.setLabelFor(getTimeStampsFormatSelect());
panelMisc.add(getTimeStampsFormatSelect(), LayoutHelper.getGBC(0, 12, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(outputTabTimeStampExampleLabel, LayoutHelper.getGBC(1, 12, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
JLabel fontNameLabel = new JLabel(Constant.messages.getString("view.options.label.fontName"));
fontNameLabel.setLabelFor(getFontName());
panelMisc.add(fontNameLabel, LayoutHelper.getGBC(0, 13, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getFontName(), LayoutHelper.getGBC(1, 13, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
JLabel fontSizeLabel = new JLabel(Constant.messages.getString("view.options.label.fontSize"));
fontSizeLabel.setLabelFor(getFontSize());
panelMisc.add(fontSizeLabel, LayoutHelper.getGBC(0, 14, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getFontSize(), LayoutHelper.getGBC(1, 14, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
// NOTE(review): this local JLabel shadows the field of the same name.
JLabel fontExampleLabel = new JLabel(Constant.messages.getString("view.options.label.fontExample"));
fontExampleLabel.setLabelFor(getFontExampleLabel());
panelMisc.add(fontExampleLabel, LayoutHelper.getGBC(0, 15, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
panelMisc.add(getFontExampleLabel(), LayoutHelper.getGBC(1, 15, 1, 1.0D, new java.awt.Insets(2,2,2,2)));
// Filler row with extra weight pushes the option rows to the top.
panelMisc.add(new JLabel(""), LayoutHelper.getGBC(0, 20, 1, 1.0D, 1.0D));
JLabel scaleImagesLabel = new JLabel(Constant.messages.getString("view.options.label.scaleImages"));
fontExampleLabel.setLabelFor(getScaleImages()); panelMisc.add(scaleImagesLabel, LayoutHelper.getGBC(0, 16, 1, 1.0D, new java.awt.Insets(2,2,2,2))); panelMisc.add(getScaleImages(), LayoutHelper.getGBC(1, 16, 1, 1.0D, new java.awt.Insets(2,2,2,2))); panelMisc.add(new JLabel(""), LayoutHelper.getGBC(0, 20, 1, 1.0D, 1.0D)); } return panelMisc; } private JCheckBox getShowTabNames() { if (chkShowTabNames == null) { chkShowTabNames = new JCheckBox(); chkShowTabNames.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkShowTabNames.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkShowTabNames; } private JCheckBox getShowSplashScreen() { if (chkShowSplashScreen == null) { chkShowSplashScreen = new JCheckBox(); chkShowSplashScreen.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkShowSplashScreen.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkShowSplashScreen; } private JCheckBox getChkProcessImages() { if (chkProcessImages == null) { chkProcessImages = new JCheckBox(); chkProcessImages.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkProcessImages.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkProcessImages; } private JCheckBox getChkShowMainToolbar() { if (chkShowMainToolbar == null) { chkShowMainToolbar = new JCheckBox(); chkShowMainToolbar.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkShowMainToolbar.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkShowMainToolbar; } private JCheckBox getChkWmUiHandling() { if (chkWmUiHandling == null) { chkWmUiHandling = new JCheckBox(); chkWmUiHandling.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkWmUiHandling.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkWmUiHandling; } private JCheckBox getChkAskOnExit() { if (chkAskOnExit == null) { chkAskOnExit = new JCheckBox(); chkAskOnExit.setVerticalAlignment(javax.swing.SwingConstants.TOP); 
chkAskOnExit.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkAskOnExit; } private JComboBox<String> getDisplaySelect() { if (displaySelect == null) { displaySelect = new JComboBox<>(); displaySelect.addItem(Constant.messages.getString("view.options.label.display.left")); displaySelect.addItem(Constant.messages.getString("view.options.label.display.bottom")); displaySelect.addItem(Constant.messages.getString("view.options.label.display.full")); } return displaySelect; } private JComboBox<String> getBrkPanelViewSelect() { if (brkPanelViewSelect == null) { brkPanelViewSelect = new JComboBox<>(); brkPanelViewSelect.addItem(Constant.messages.getString("view.options.label.brkPanelView.toolbaronly")); brkPanelViewSelect.addItem(Constant.messages.getString("view.options.label.brkPanelView.breakonly")); brkPanelViewSelect.addItem(Constant.messages.getString("view.options.label.brkPanelView.both")); } return brkPanelViewSelect; } private JCheckBox getChkAdvancedView() { if (chkAdvancedView == null) { chkAdvancedView = new JCheckBox(); chkAdvancedView.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkAdvancedView.setVerticalTextPosition(javax.swing.SwingConstants.TOP); } return chkAdvancedView; } private JCheckBox getChkOutputTabTimeStamps() { if (chkOutputTabTimeStamping == null) { chkOutputTabTimeStamping = new JCheckBox(); chkOutputTabTimeStamping.setVerticalAlignment(javax.swing.SwingConstants.TOP); chkOutputTabTimeStamping.setVerticalTextPosition(javax.swing.SwingConstants.TOP); chkOutputTabTimeStamping.addItemListener(new java.awt.event.ItemListener(){ @Override public void itemStateChanged(ItemEvent e) { timeStampsFormatSelect.setEnabled(e.getStateChange() == ItemEvent.SELECTED); }; }); } return chkOutputTabTimeStamping; } private JComboBox<String> getTimeStampsFormatSelect() { if (timeStampsFormatSelect == null) { String[] timeStampFormatStrings = {TIME_STAMP_FORMAT_DATETIME, TIME_STAMP_FORMAT_ISO8601, TIME_STAMP_FORMAT_TIMEONLY}; 
timeStampsFormatSelect = new JComboBox<String>(timeStampFormatStrings); timeStampsFormatSelect.setToolTipText(TIME_STAMP_FORMAT_COMBOBOX_TOOL_TIP); timeStampsFormatSelect.setSelectedItem(getTimeStampsFormatSelect().getSelectedItem()); timeStampsFormatSelect.setEditable(true); if (chkOutputTabTimeStamping.isSelected()) //The drop-down should only be enabled if time stamping is turned on timeStampsFormatSelect.setEnabled(true); else timeStampsFormatSelect.setEnabled(false); timeStampsFormatSelect.addActionListener(new java.awt.event.ActionListener() { @Override public void actionPerformed(java.awt.event.ActionEvent e) { String selectedDateFormat = (String)getTimeStampsFormatSelect().getSelectedItem(); outputTabTimeStampExampleLabel.setText(TimeStampUtils.currentFormattedTimeStamp(selectedDateFormat)); }; }); } return timeStampsFormatSelect; } private ZapNumberSpinner getLargeRequestSize() { if (largeRequestSize == null) { largeRequestSize = new ZapNumberSpinner(-1, LargeRequestUtil.DEFAULT_MIN_CONTENT_LENGTH, Integer.MAX_VALUE); } return largeRequestSize; } private ZapNumberSpinner getLargeResponseSize() { if (largeResponseSize == null) { largeResponseSize = new ZapNumberSpinner(-1, LargeResponseUtil.DEFAULT_MIN_CONTENT_LENGTH, Integer.MAX_VALUE); } return largeResponseSize; } private ZapNumberSpinner getFontSize() { if (fontSize == null) { fontSize = new ZapNumberSpinner(-1, 8, 100); if (! 
FontUtils.canChangeSize()) { fontSize.setEnabled(false); } fontSize.addChangeListener(new ChangeListener() { @Override public void stateChanged(ChangeEvent e) { // Show what the default font will look like setExampleFont(); }}); } return fontSize; } private void setExampleFont() { String name; if (getFontName().getSelectedItem() == null) { name = ""; } else { name = (String)getFontName().getSelectedItem(); } Font font = FontUtils.getFont(name); int size = getFontSize().getValue(); if (size == -1) { size = FontUtils.getSystemDefaultFont().getSize(); } getFontExampleLabel().setFont(font.deriveFont((float)size)); } @SuppressWarnings("unchecked") private JComboBox<String> getFontName() { if (fontName == null) { fontName = new JComboBox<String>(); fontName.setRenderer(new JComboBoxFontRenderer()); String fonts[] = GraphicsEnvironment.getLocalGraphicsEnvironment().getAvailableFontFamilyNames(); fontName.addItem(" "); // Default to system font for (String font : fonts) { fontName.addItem(font); } if (! FontUtils.canChangeSize()) { fontName.setEnabled(false); } fontName.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { // Show what the default font will look like setExampleFont(); }}); } return fontName; } private JLabel getFontExampleLabel() { if (fontExampleLabel == null) { fontExampleLabel = new JLabel(Constant.messages.getString("view.options.label.exampleText")); fontExampleLabel.setBorder(BorderFactory.createLineBorder(Color.BLACK)); } return fontExampleLabel; } private JCheckBox getScaleImages() { if (scaleImages == null) { scaleImages = new JCheckBox(); if (! 
FontUtils.canChangeSize()) { scaleImages.setEnabled(false); } } return scaleImages; } @Override public void initParam(Object obj) { OptionsParam options = (OptionsParam) obj; getShowTabNames().setSelected(options.getViewParam().getShowTabNames()); getShowSplashScreen().setSelected(options.getViewParam().isShowSplashScreen()); getChkProcessImages().setSelected(options.getViewParam().getProcessImages() > 0); displaySelect.setSelectedIndex(options.getViewParam().getDisplayOption()); brkPanelViewSelect.setSelectedIndex(options.getViewParam().getBrkPanelViewOption()); getChkShowMainToolbar().setSelected(options.getViewParam().getShowMainToolbar() > 0); chkAdvancedView.setSelected(options.getViewParam().getAdvancedViewOption() > 0); chkAskOnExit.setSelected(options.getViewParam().getAskOnExitOption() > 0); chkWmUiHandling.setSelected(options.getViewParam().getWmUiHandlingOption() > 0); getChkOutputTabTimeStamps().setSelected(options.getViewParam().isOutputTabTimeStampingEnabled()); timeStampsFormatSelect.setSelectedItem(options.getViewParam().getOutputTabTimeStampsFormat()); largeRequestSize.setValue(options.getViewParam().getLargeRequestSize()); largeResponseSize.setValue(options.getViewParam().getLargeResponseSize()); getFontSize().setValue(options.getViewParam().getFontSize()); getFontName().setSelectedItem(options.getViewParam().getFontName()); getScaleImages().setSelected(options.getViewParam().isScaleImages()); } @Override public void validateParam(Object obj) { // no validation needed } @Override public void saveParam (Object obj) throws Exception { OptionsParam options = (OptionsParam) obj; options.getViewParam().setShowTabNames(getShowTabNames().isSelected()); options.getViewParam().setShowSplashScreen(getShowSplashScreen().isSelected()); options.getViewParam().setProcessImages((getChkProcessImages().isSelected()) ? 
1 : 0); options.getViewParam().setDisplayOption(displaySelect.getSelectedIndex()); options.getViewParam().setBrkPanelViewOption(brkPanelViewSelect.getSelectedIndex()); options.getViewParam().setShowMainToolbar((getChkShowMainToolbar().isSelected()) ? 1 : 0); options.getViewParam().setAdvancedViewOption(getChkAdvancedView().isSelected() ? 1 : 0); options.getViewParam().setAskOnExitOption(getChkAskOnExit().isSelected() ? 1 : 0); options.getViewParam().setWmUiHandlingOption(getChkWmUiHandling().isSelected() ? 1 : 0); options.getViewParam().setOutputTabTimeStampingEnabled(getChkOutputTabTimeStamps().isSelected()); options.getViewParam().setOutputTabTimeStampsFormat((String) getTimeStampsFormatSelect().getSelectedItem()); options.getViewParam().setLargeRequestSize(getLargeRequestSize().getValue()); options.getViewParam().setLargeResponseSize(getLargeResponseSize().getValue()); options.getViewParam().setFontSize(getFontSize().getValue()); options.getViewParam().setFontName((String)getFontName().getSelectedItem()); options.getViewParam().setScaleImages(getScaleImages().isSelected()); } @Override public String getHelpIndex() { // ZAP: added help index return "ui.dialogs.options.view"; } @SuppressWarnings("serial") private class JComboBoxFontRenderer extends BasicComboBoxRenderer { protected DefaultListCellRenderer defaultRenderer = new DefaultListCellRenderer(); @SuppressWarnings("rawtypes") public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { JLabel renderer = (JLabel) defaultRenderer.getListCellRendererComponent(list, value, index, isSelected,cellHasFocus); Font font = FontUtils.getFont((String)value); if (font != null) { renderer.setFont(FontUtils.getFont((String)value)); } else { renderer.setFont(FontUtils.getFont(FontUtils.Size.standard)); } return renderer; } } }
/** * Copyright (c) 2012, Ben Fortuna * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * o Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * o Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * o Neither the name of Ben Fortuna nor the names of any other contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package net.fortuna.ical4j.model;

import java.io.IOException;
import java.io.Serializable;
import java.text.ParseException;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;

import net.fortuna.ical4j.model.parameter.Value;
import net.fortuna.ical4j.util.CompatibilityHints;
import net.fortuna.ical4j.util.Configurator;
import net.fortuna.ical4j.util.Dates;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * $Id$ [18-Apr-2004]
 *
 * Defines a recurrence.
 * @version 2.0
 * @author Ben Fortuna
 */
public class Recur implements Serializable {

    private static final long serialVersionUID = -7333226591784095142L;

    // RECUR rule-part names as they appear in an RRULE string (e.g. "FREQ=DAILY;COUNT=10").
    private static final String FREQ = "FREQ";

    private static final String UNTIL = "UNTIL";

    private static final String COUNT = "COUNT";

    private static final String INTERVAL = "INTERVAL";

    private static final String BYSECOND = "BYSECOND";

    private static final String BYMINUTE = "BYMINUTE";

    private static final String BYHOUR = "BYHOUR";

    private static final String BYDAY = "BYDAY";

    private static final String BYMONTHDAY = "BYMONTHDAY";

    private static final String BYYEARDAY = "BYYEARDAY";

    private static final String BYWEEKNO = "BYWEEKNO";

    private static final String BYMONTH = "BYMONTH";

    private static final String BYSETPOS = "BYSETPOS";

    private static final String WKST = "WKST";

    /**
     * Second frequency resolution.
     */
    public static final String SECONDLY = "SECONDLY";

    /**
     * Minute frequency resolution.
     */
    public static final String MINUTELY = "MINUTELY";

    /**
     * Hour frequency resolution.
     */
    public static final String HOURLY = "HOURLY";

    /**
     * Day frequency resolution.
     */
    public static final String DAILY = "DAILY";

    /**
     * Week frequency resolution.
     */
    public static final String WEEKLY = "WEEKLY";

    /**
     * Month frequency resolution.
     */
    public static final String MONTHLY = "MONTHLY";

    /**
     * Year frequency resolution.
     */
    public static final String YEARLY = "YEARLY";

    /**
     * When calculating dates matching this recur ({@code getDates()} or {@code getNextDate}),
     * this property defines the maximum number of attempt to find a matching date by
     * incrementing the seed.
     * <p>The default value is 1000. A value of -1 corresponds to no maximum.</p>
     */
    public static final String KEY_MAX_INCREMENT_COUNT = "net.fortuna.ical4j.recur.maxincrementcount";

    private static int maxIncrementCount;

    // Read the increment cap once at class-load time; falls back to 1000 when the
    // system/configuration property is absent or empty. NOTE(review): a malformed
    // value would throw NumberFormatException during class initialisation.
    static {
        final String value = Configurator.getProperty(KEY_MAX_INCREMENT_COUNT);
        if (value != null && value.length() > 0) {
            maxIncrementCount = Integer.parseInt(value);
        }
        else {
            maxIncrementCount = 1000;
        }
    }

    // Transient: not serialized. NOTE(review): not reinitialised here after
    // deserialization — confirm a readObject/readResolve elsewhere restores it.
    private transient Log log = LogFactory.getLog(Recur.class);

    // Parsed RRULE parts. count/interval of -1 mean "not specified".
    private String frequency;

    private Date until;

    private int count = -1;

    private int interval = -1;

    private NumberList secondList;

    private NumberList minuteList;

    private NumberList hourList;

    private WeekDayList dayList;

    private NumberList monthDayList;

    private NumberList yearDayList;

    private NumberList weekNoList;

    private NumberList monthList;

    private NumberList setPosList;

    private String weekStartDay;

    private int calendarWeekStartDay;

    // Unrecognised rule parts kept when relaxed parsing is enabled (raw Map in this version).
    private Map experimentalValues = new HashMap();

    // Calendar field we increment based on frequency.
    private int calIncField;

    /**
     * Default constructor.
     */
    public Recur() {
        // default week start is Monday per RFC5545
        calendarWeekStartDay = Calendar.MONDAY;
    }

    /**
     * Constructs a new instance from the specified string value.
     * @param aValue a string representation of a recurrence.
* @throws ParseException thrown when the specified string contains an invalid representation of an UNTIL date value
     */
    public Recur(final String aValue) throws ParseException {
        // default week start is Monday per RFC5545
        calendarWeekStartDay = Calendar.MONDAY;
        // An RRULE is a ';'-separated list of NAME=VALUE parts; tokenizing on both
        // ';' and '=' yields alternating name/value tokens.
        final StringTokenizer t = new StringTokenizer(aValue, ";=");
        while (t.hasMoreTokens()) {
            final String token = t.nextToken();
            if (FREQ.equals(token)) {
                frequency = nextToken(t, token);
            }
            else if (UNTIL.equals(token)) {
                final String untilString = nextToken(t, token);
                // A 'T' separator indicates a DATE-TIME value; otherwise a DATE.
                if (untilString != null && untilString.indexOf("T") >= 0) {
                    until = new DateTime(untilString);
                    // UNTIL must be specified in UTC time..
                    ((DateTime) until).setUtc(true);
                }
                else {
                    until = new Date(untilString);
                }
            }
            // NOTE(review): COUNT/INTERVAL use Integer.parseInt, so a malformed
            // number surfaces as NumberFormatException, not ParseException.
            else if (COUNT.equals(token)) {
                count = Integer.parseInt(nextToken(t, token));
            }
            else if (INTERVAL.equals(token)) {
                interval = Integer.parseInt(nextToken(t, token));
            }
            // BY* parts: each NumberList is built with its RFC 5545 value range and
            // whether negative offsets are allowed.
            else if (BYSECOND.equals(token)) {
                secondList = new NumberList(nextToken(t, token), 0, 59, false);
            }
            else if (BYMINUTE.equals(token)) {
                minuteList = new NumberList(nextToken(t, token), 0, 59, false);
            }
            else if (BYHOUR.equals(token)) {
                hourList = new NumberList(nextToken(t, token), 0, 23, false);
            }
            else if (BYDAY.equals(token)) {
                dayList = new WeekDayList(nextToken(t, token));
            }
            else if (BYMONTHDAY.equals(token)) {
                monthDayList = new NumberList(nextToken(t, token), 1, 31, true);
            }
            else if (BYYEARDAY.equals(token)) {
                yearDayList = new NumberList(nextToken(t, token), 1, 366, true);
            }
            else if (BYWEEKNO.equals(token)) {
                weekNoList = new NumberList(nextToken(t, token), 1, 53, true);
            }
            else if (BYMONTH.equals(token)) {
                monthList = new NumberList(nextToken(t, token), 1, 12, false);
            }
            else if (BYSETPOS.equals(token)) {
                setPosList = new NumberList(nextToken(t, token), 1, 366, true);
            }
            else if (WKST.equals(token)) {
                // Week start affects week-number/BYDAY expansion; keep both the raw
                // string and the java.util.Calendar day constant.
                weekStartDay = nextToken(t, token);
                calendarWeekStartDay = WeekDay.getCalendarDay(new WeekDay(weekStartDay));
            }
            else {
                if (CompatibilityHints.isHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING)) {
                    // assume experimental value..
                    experimentalValues.put(token, nextToken(t, token));
                }
                else {
                    // NOTE(review): unknown parts throw IllegalArgumentException,
                    // not the declared ParseException.
                    throw new IllegalArgumentException("Invalid recurrence rule part: " + token + "=" + nextToken(t, token));
                }
            }
        }
        validateFrequency();
    }

    /**
     * Returns the next token, converting tokenizer exhaustion (a part name with no
     * value) into an IllegalArgumentException naming the offending part.
     */
    private String nextToken(StringTokenizer t, String lastToken) {
        try {
            return t.nextToken();
        }
        catch (NoSuchElementException e) {
            throw new IllegalArgumentException("Missing expected token, last token: " + lastToken);
        }
    }

    /**
     * @param frequency a recurrence frequency string
     * @param until maximum recurrence date
     */
    public Recur(final String frequency, final Date until) {
        // default week start is Monday per RFC5545
        calendarWeekStartDay = Calendar.MONDAY;
        this.frequency = frequency;
        this.until = until;
        validateFrequency();
    }

    /**
     * @param frequency a recurrence frequency string
     * @param count maximum recurrence count
     */
    public Recur(final String frequency, final int count) {
        // default week start is Monday per RFC5545
        calendarWeekStartDay = Calendar.MONDAY;
        this.frequency = frequency;
        this.count = count;
        validateFrequency();
    }

    // The accessors below lazily create an empty list on first use, so callers
    // never see null and may mutate the returned (live) list.

    /**
     * @return Returns the dayList.
     */
    public final WeekDayList getDayList() {
        if (dayList == null) {
            dayList = new WeekDayList();
        }
        return dayList;
    }

    /**
     * @return Returns the hourList.
     */
    public final NumberList getHourList() {
        if (hourList == null) {
            hourList = new NumberList(0, 23, false);
        }
        return hourList;
    }

    /**
     * @return Returns the minuteList.
     */
    public final NumberList getMinuteList() {
        if (minuteList == null) {
            minuteList = new NumberList(0, 59, false);
        }
        return minuteList;
    }

    /**
     * @return Returns the monthDayList.
     */
    public final NumberList getMonthDayList() {
        if (monthDayList == null) {
            monthDayList = new NumberList(1, 31, true);
        }
        return monthDayList;
    }

    /**
     * @return Returns the monthList.
*/ public final NumberList getMonthList() { if (monthList == null) { monthList = new NumberList(1, 12, false); } return monthList; } /** * @return Returns the secondList. */ public final NumberList getSecondList() { if (secondList == null) { secondList = new NumberList(0, 59, false); } return secondList; } /** * @return Returns the setPosList. */ public final NumberList getSetPosList() { if (setPosList == null) { setPosList = new NumberList(1, 366, true); } return setPosList; } /** * @return Returns the weekNoList. */ public final NumberList getWeekNoList() { if (weekNoList == null) { weekNoList = new NumberList(1, 53, true); } return weekNoList; } /** * @return Returns the yearDayList. */ public final NumberList getYearDayList() { if (yearDayList == null) { yearDayList = new NumberList(1, 366, true); } return yearDayList; } /** * @return Returns the count or -1 if the rule does not have a count. */ public final int getCount() { return count; } /** * @return Returns the experimentalValues. */ public final Map getExperimentalValues() { return experimentalValues; } /** * @return Returns the frequency. */ public final String getFrequency() { return frequency; } /** * @return Returns the interval or -1 if the rule does not have an interval defined. */ public final int getInterval() { return interval; } /** * @return Returns the until or null if there is none. */ public final Date getUntil() { return until; } /** * @return Returns the weekStartDay or null if there is none. */ public final String getWeekStartDay() { return weekStartDay; } /** * @param weekStartDay The weekStartDay to set. 
*/ public final void setWeekStartDay(final String weekStartDay) { this.weekStartDay = weekStartDay; if (weekStartDay != null) { calendarWeekStartDay = WeekDay.getCalendarDay(new WeekDay(weekStartDay)); } } /** * {@inheritDoc} */ public final String toString() { final StringBuffer b = new StringBuffer(); b.append(FREQ); b.append('='); b.append(frequency); if (weekStartDay != null) { b.append(';'); b.append(WKST); b.append('='); b.append(weekStartDay); } if (until != null) { b.append(';'); b.append(UNTIL); b.append('='); // Note: date-time representations should always be in UTC time. b.append(until); } if (count >= 1) { b.append(';'); b.append(COUNT); b.append('='); b.append(count); } if (interval >= 1) { b.append(';'); b.append(INTERVAL); b.append('='); b.append(interval); } if (!getMonthList().isEmpty()) { b.append(';'); b.append(BYMONTH); b.append('='); b.append(monthList); } if (!getWeekNoList().isEmpty()) { b.append(';'); b.append(BYWEEKNO); b.append('='); b.append(weekNoList); } if (!getYearDayList().isEmpty()) { b.append(';'); b.append(BYYEARDAY); b.append('='); b.append(yearDayList); } if (!getMonthDayList().isEmpty()) { b.append(';'); b.append(BYMONTHDAY); b.append('='); b.append(monthDayList); } if (!getDayList().isEmpty()) { b.append(';'); b.append(BYDAY); b.append('='); b.append(dayList); } if (!getHourList().isEmpty()) { b.append(';'); b.append(BYHOUR); b.append('='); b.append(hourList); } if (!getMinuteList().isEmpty()) { b.append(';'); b.append(BYMINUTE); b.append('='); b.append(minuteList); } if (!getSecondList().isEmpty()) { b.append(';'); b.append(BYSECOND); b.append('='); b.append(secondList); } if (!getSetPosList().isEmpty()) { b.append(';'); b.append(BYSETPOS); b.append('='); b.append(setPosList); } return b.toString(); } /** * Returns a list of start dates in the specified period represented by this recur. 
Any date fields not specified by
     * this recur are retained from the period start, and as such you should ensure the period start is initialised
     * correctly.
     * @param periodStart the start of the period
     * @param periodEnd the end of the period
     * @param value the type of dates to generate (i.e. date/date-time)
     * @return a list of dates
     */
    public final DateList getDates(final Date periodStart, final Date periodEnd, final Value value) {
        // Uses the period start itself as the seed; no result-count limit.
        return getDates(periodStart, periodStart, periodEnd, value, -1);
    }

    /**
     * Convenience method for retrieving recurrences in a specified period.
     * @param seed a seed date for generating recurrence instances
     * @param period the period of returned recurrence dates
     * @param value type of dates to generate
     * @return a list of dates
     */
    public final DateList getDates(final Date seed, final Period period, final Value value) {
        return getDates(seed, period.getStart(), period.getEnd(), value, -1);
    }

    /**
     * Returns a list of start dates in the specified period represented by this recur. This method includes a base date
     * argument, which indicates the start of the fist occurrence of this recurrence. The base date is used to inject
     * default values to return a set of dates in the correct format. For example, if the search start date (start) is
     * Wed, Mar 23, 12:19PM, but the recurrence is Mon - Fri, 9:00AM - 5:00PM, the start dates returned should all be at
     * 9:00AM, and not 12:19PM.
     * @return a list of dates represented by this recur instance
     * @param seed the start date of this Recurrence's first instance
     * @param periodStart the start of the period
     * @param periodEnd the end of the period
     * @param value the type of dates to generate (i.e. date/date-time)
     */
    public final DateList getDates(final Date seed, final Date periodStart, final Date periodEnd, final Value value) {
        return getDates(seed, periodStart, periodEnd, value, -1);
    }

    /**
     * Returns a list of start dates in the specified period represented by this recur. This method includes a base date
     * argument, which indicates the start of the fist occurrence of this recurrence. The base date is used to inject
     * default values to return a set of dates in the correct format. For example, if the search start date (start) is
     * Wed, Mar 23, 12:19PM, but the recurrence is Mon - Fri, 9:00AM - 5:00PM, the start dates returned should all be at
     * 9:00AM, and not 12:19PM.
     * @return a list of dates represented by this recur instance
     * @param seed the start date of this Recurrence's first instance
     * @param periodStart the start of the period
     * @param periodEnd the end of the period
     * @param value the type of dates to generate (i.e. date/date-time)
     * @param maxCount limits the number of instances returned. Up to one years
     * worth extra may be returned. Less than 0 means no limit
     */
    public final DateList getDates(final Date seed, final Date periodStart, final Date periodEnd,
            final Value value, final int maxCount) {
        final DateList dates = new DateList(value);
        // Propagate the seed's UTC flag / time zone to the result list.
        if (seed instanceof DateTime) {
            if (((DateTime) seed).isUtc()) {
                dates.setUtc(true);
            }
            else {
                dates.setTimeZone(((DateTime) seed).getTimeZone());
            }
        }
        final Calendar cal = getCalendarInstance(seed, true);
        // optimize the start time for selecting candidates
        // (only applicable where a COUNT is not specified)
        if (getCount() < 1) {
            final Calendar seededCal = (Calendar) cal.clone();
            // Fast-forward to the last increment strictly before periodStart; cal
            // always trails seededCal by one increment so no candidate is skipped.
            while (seededCal.getTime().before(periodStart)) {
                cal.setTime(seededCal.getTime());
                increment(seededCal);
            }
        }
        int invalidCandidateCount = 0;
        // Consecutive frequency increments that produced no candidates; bounded by
        // maxIncrementCount to guarantee termination on rules that never match.
        int noCandidateIncrementCount = 0;
        Date candidate = null;
        while ((maxCount < 0) || (dates.size() < maxCount)) {
            final Date candidateSeed = Dates.getInstance(cal.getTime(), value);
            // Stop conditions are checked against the last candidate of the
            // previous iteration (null on the first pass).
            if (getUntil() != null && candidate != null && candidate.after(getUntil())) {
                break;
            }
            if (periodEnd != null && candidate != null && candidate.after(periodEnd)) {
                break;
            }
            // COUNT counts occurrences from the seed, so skipped (invalid)
            // candidates still consume it.
            if (getCount() >= 1 && (dates.size() + invalidCandidateCount) >= getCount()) {
                break;
            }
            // Keyed off the runtime type rather than the requested value type
            // (was: if (Value.DATE_TIME.equals(value)) {).
            if (candidateSeed instanceof DateTime) {
                if (dates.isUtc()) {
                    ((DateTime) candidateSeed).setUtc(true);
                }
                else {
                    ((DateTime) candidateSeed).setTimeZone(dates.getTimeZone());
                }
            }
            final DateList candidates = getCandidates(candidateSeed, value);
            if (!candidates.isEmpty()) {
                noCandidateIncrementCount = 0;
                // sort candidates for identifying when UNTIL date is exceeded..
                Collections.sort(candidates);
                for (final Iterator i = candidates.iterator(); i.hasNext();) {
                    candidate = (Date) i.next();
                    // don't count candidates that occur before the seed date..
                    if (!candidate.before(seed)) {
                        // candidates exclusive of periodEnd..
                        if (candidate.before(periodStart) || !candidate.before(periodEnd)) {
                            invalidCandidateCount++;
                        }
                        else if (getCount() >= 1 && (dates.size() + invalidCandidateCount) >= getCount()) {
                            break;
                        }
                        else if (!(getUntil() != null && candidate.after(getUntil()))) {
                            dates.add(candidate);
                        }
                    }
                }
            }
            else {
                noCandidateIncrementCount++;
                if ((maxIncrementCount > 0) && (noCandidateIncrementCount > maxIncrementCount)) {
                    break;
                }
            }
            increment(cal);
        }
        // sort final list..
        Collections.sort(dates);
        return dates;
    }

    /**
     * Returns the next date of this recurrence given a seed date
     * and start date. The seed date indicates the start of the fist
     * occurrence of this recurrence. The start date is the
     * starting date to search for the next recurrence. Return null
     * if there is no occurrence date after start date.
     * @return the next date in the recurrence series after startDate
     * @param seed the start date of this Recurrence's first instance
     * @param startDate the date to start the search
     */
    public final Date getNextDate(final Date seed, final Date startDate) {

        final Calendar cal = getCalendarInstance(seed, true);

        // optimize the start time for selecting candidates
        // (only applicable where a COUNT is not specified)
        // fast-forward cal to the last increment at or before startDate..
        if (getCount() < 1) {
            final Calendar seededCal = (Calendar) cal.clone();
            while (seededCal.getTime().before(startDate)) {
                cal.setTime(seededCal.getTime());
                increment(seededCal);
            }
        }

        // candidates at/before startDate, counted against COUNT..
        int invalidCandidateCount = 0;
        // consecutive increments that produced no candidates..
        int noCandidateIncrementCount = 0;
        Date candidate = null;
        final Value value = seed instanceof DateTime ? Value.DATE_TIME : Value.DATE;

        while (true) {
            final Date candidateSeed = Dates.getInstance(cal.getTime(), value);

            // stop once the last candidate seen is past UNTIL..
            if (getUntil() != null && candidate != null && candidate.after(getUntil())) {
                break;
            }

            // stop once COUNT occurrences have been consumed..
            if (getCount() > 0 && invalidCandidateCount >= getCount()) {
                break;
            }

            // propagate the seed's UTC flag / timezone onto the candidate seed..
            if (Value.DATE_TIME.equals(value)) {
                if (((DateTime) seed).isUtc()) {
                    ((DateTime) candidateSeed).setUtc(true);
                }
                else {
                    ((DateTime) candidateSeed).setTimeZone(((DateTime) seed).getTimeZone());
                }
            }

            final DateList candidates = getCandidates(candidateSeed, value);
            if (!candidates.isEmpty()) {
                noCandidateIncrementCount = 0;
                // sort candidates for identifying when UNTIL date is exceeded..
                Collections.sort(candidates);

                for (final Iterator i = candidates.iterator(); i.hasNext();) {
                    candidate = (Date) i.next();
                    // don't count candidates that occur before the seed date..
                    if (!candidate.before(seed)) {
                        // Candidate must be after startDate because
                        // we want the NEXT occurrence
                        if (!candidate.after(startDate)) {
                            invalidCandidateCount++;
                        }
                        else if (getCount() > 0 && invalidCandidateCount >= getCount()) {
                            break;
                        }
                        else if (!(getUntil() != null && candidate.after(getUntil()))) {
                            // first candidate after startDate and within UNTIL wins..
                            return candidate;
                        }
                    }
                }
            }
            else {
                // no candidates for this increment; bail out after
                // maxIncrementCount consecutive empty increments to avoid
                // looping forever on rules that can never match..
                noCandidateIncrementCount++;
                if ((maxIncrementCount > 0) && (noCandidateIncrementCount > maxIncrementCount)) {
                    break;
                }
            }
            increment(cal);
        }
        return null;
    }

    /**
     * Increments the specified calendar according to the frequency and interval specified in this recurrence rule.
     * @param cal a java.util.Calendar to increment
     */
    private void increment(final Calendar cal) {
        // initialise interval..
        // INTERVAL defaults to 1 when unspecified or non-positive..
        final int calInterval = (getInterval() >= 1) ? getInterval() : 1;
        cal.add(calIncField, calInterval);
    }

    /**
     * Returns a list of possible dates generated from the applicable BY* rules, using the specified date as a seed.
     * @param date the seed date
     * @param value the type of date list to return
     * @return a DateList
     */
    private DateList getCandidates(final Date date, final Value value) {
        DateList dates = new DateList(value);
        // carry the seed's UTC flag / timezone over to the candidate list..
        if (date instanceof DateTime) {
            if (((DateTime) date).isUtc()) {
                dates.setUtc(true);
            }
            else {
                dates.setTimeZone(((DateTime) date).getTimeZone());
            }
        }
        dates.add(date);
        // expand/filter through each BY* rule in turn; each stage consumes
        // the previous stage's output..
        dates = getMonthVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYMONTH processing: " + dates);
        }
        dates = getWeekNoVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYWEEKNO processing: " + dates);
        }
        dates = getYearDayVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYYEARDAY processing: " + dates);
        }
        dates = getMonthDayVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYMONTHDAY processing: " + dates);
        }
        dates = getDayVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYDAY processing: " + dates);
        }
        dates = getHourVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYHOUR processing: " + dates);
        }
        dates = getMinuteVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYMINUTE processing: " + dates);
        }
        dates = getSecondVariants(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after BYSECOND processing: " + dates);
        }
        // SETPOS runs last, over the fully-expanded candidate set..
        dates = applySetPosRules(dates);
        // debugging..
        if (log.isDebugEnabled()) {
            log.debug("Dates after SETPOS processing: " + dates);
        }
        return dates;
    }

    /**
     * Applies BYSETPOS rules to <code>dates</code>. Valid positions are from 1 to the size of the date list. Invalid
     * positions are ignored.
     * @param dates
     */
    private DateList applySetPosRules(final DateList dates) {
        // return if no SETPOS rules specified..
        if (getSetPosList().isEmpty()) {
            return dates;
        }
        // sort the list before processing..
        Collections.sort(dates);
        final DateList setPosDates = getDateListInstance(dates);
        final int size = dates.size();
        for (final Iterator i = getSetPosList().iterator(); i.hasNext();) {
            final Integer setPos = (Integer) i.next();
            final int pos = setPos.intValue();
            // positive positions are 1-based from the start;
            // negative positions count back from the end..
            if (pos > 0 && pos <= size) {
                setPosDates.add(dates.get(pos - 1));
            }
            else if (pos < 0 && pos >= -size) {
                setPosDates.add(dates.get(size + pos));
            }
        }
        return setPosDates;
    }

    /**
     * Applies BYMONTH rules specified in this Recur instance to the specified date list. If no BYMONTH rules are
     * specified the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getMonthVariants(final DateList dates) {
        // no BYMONTH rules - return the list unmodified..
        if (getMonthList().isEmpty()) {
            return dates;
        }
        final DateList monthlyDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            final Calendar cal = getCalendarInstance(date, true);
            // emit one variant of each date for every month listed in the rule..
            for (final Iterator j = getMonthList().iterator(); j.hasNext();) {
                final Integer month = (Integer) j.next();
                // Java months are zero-based..
                // cal.set(Calendar.MONTH, month.intValue() - 1);
                // roll (rather than set/add) so larger fields (the year) are
                // left untouched..
                cal.roll(Calendar.MONTH, (month.intValue() - 1) - cal.get(Calendar.MONTH));
                monthlyDates.add(Dates.getInstance(cal.getTime(), monthlyDates.getType()));
            }
        }
        return monthlyDates;
    }

    /**
     * Applies BYWEEKNO rules specified in this Recur instance to the specified date list. If no BYWEEKNO rules are
     * specified the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getWeekNoVariants(final DateList dates) {
        // no BYWEEKNO rules - return the list unmodified..
        if (getWeekNoList().isEmpty()) {
            return dates;
        }
        final DateList weekNoDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            final Calendar cal = getCalendarInstance(date, true);
            for (final Iterator j = getWeekNoList().iterator(); j.hasNext();) {
                final Integer weekNo = (Integer) j.next();
                // Dates.getAbsWeekNo resolves negative (end-relative) week
                // numbers to absolute ones..
                cal.set(Calendar.WEEK_OF_YEAR, Dates.getAbsWeekNo(cal.getTime(), weekNo.intValue()));
                weekNoDates.add(Dates.getInstance(cal.getTime(), weekNoDates.getType()));
            }
        }
        return weekNoDates;
    }

    /**
     * Applies BYYEARDAY rules specified in this Recur instance to the specified date list. If no BYYEARDAY rules are
     * specified the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getYearDayVariants(final DateList dates) {
        // no BYYEARDAY rules - return the list unmodified..
        if (getYearDayList().isEmpty()) {
            return dates;
        }
        final DateList yearDayDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            final Calendar cal = getCalendarInstance(date, true);
            for (final Iterator j = getYearDayList().iterator(); j.hasNext();) {
                final Integer yearDay = (Integer) j.next();
                // Dates.getAbsYearDay resolves negative (end-relative) day
                // numbers to absolute ones..
                cal.set(Calendar.DAY_OF_YEAR, Dates.getAbsYearDay(cal.getTime(), yearDay.intValue()));
                yearDayDates.add(Dates.getInstance(cal.getTime(), yearDayDates.getType()));
            }
        }
        return yearDayDates;
    }

    /**
     * Applies BYMONTHDAY rules specified in this Recur instance to the specified date list. If no BYMONTHDAY rules are
     * specified the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getMonthDayVariants(final DateList dates) {
        // no BYMONTHDAY rules - return the list unmodified..
        if (getMonthDayList().isEmpty()) {
            return dates;
        }
        final DateList monthDayDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            // non-lenient calendar so invalid days (e.g. Feb 30) throw rather
            // than roll over into the next month..
            final Calendar cal = getCalendarInstance(date, false);
            for (final Iterator j = getMonthDayList().iterator(); j.hasNext();) {
                final Integer monthDay = (Integer) j.next();
                try {
                    cal.set(Calendar.DAY_OF_MONTH, Dates.getAbsMonthDay(cal.getTime(), monthDay.intValue()));
                    monthDayDates.add(Dates.getInstance(cal.getTime(), monthDayDates.getType()));
                }
                catch (IllegalArgumentException iae) {
                    // day does not exist in this month - skip it..
                    if (log.isTraceEnabled()) {
                        log.trace("Invalid day of month: " + Dates.getAbsMonthDay(cal
                                .getTime(), monthDay.intValue()));
                    }
                }
            }
        }
        return monthDayDates;
    }

    /**
     * Applies BYDAY rules specified in this Recur instance to the specified date list. If no BYDAY rules are specified
     * the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getDayVariants(final DateList dates) {
        // no BYDAY rules - return the list unmodified..
        if (getDayList().isEmpty()) {
            return dates;
        }
        final DateList weekDayDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            for (final Iterator j = getDayList().iterator(); j.hasNext();) {
                final WeekDay weekDay = (WeekDay) j.next();
                // if BYYEARDAY or BYMONTHDAY is specified filter existing
                // list..
                // (BYDAY limits rather than expands in that case)
                if (!getYearDayList().isEmpty() || !getMonthDayList().isEmpty()) {
                    final Calendar cal = getCalendarInstance(date, true);
                    if (weekDay.equals(WeekDay.getWeekDay(cal))) {
                        weekDayDates.add(date);
                    }
                }
                else {
                    // otherwise expand to all matching weekdays within the
                    // current frequency period..
                    weekDayDates.addAll(getAbsWeekDays(date, dates.getType(), weekDay));
                }
            }
        }
        return weekDayDates;
    }

    /**
     * Returns a list of applicable dates corresponding to the specified week day in accordance with the frequency
     * specified by this recurrence rule.
     * @param date
     * @param weekDay
     * @return
     */
    private List getAbsWeekDays(final Date date, final Value type, final WeekDay weekDay) {
        final Calendar cal = getCalendarInstance(date, true);
        final DateList days = new DateList(type);
        // carry the seed's UTC flag / timezone over to the result list..
        if (date instanceof DateTime) {
            if (((DateTime) date).isUtc()) {
                days.setUtc(true);
            }
            else {
                days.setTimeZone(((DateTime) date).getTimeZone());
            }
        }
        final int calDay = WeekDay.getCalendarDay(weekDay);
        if (calDay == -1) {
            // a matching weekday cannot be identified..
            return days;
        }
        if (DAILY.equals(getFrequency())) {
            // DAILY: the date itself either matches the weekday or nothing does..
            if (cal.get(Calendar.DAY_OF_WEEK) == calDay) {
                days.add(Dates.getInstance(cal.getTime(), type));
            }
        }
        else if (WEEKLY.equals(getFrequency()) || !getWeekNoList().isEmpty()) {
            final int weekNo = cal.get(Calendar.WEEK_OF_YEAR);
            // construct a list of possible week days..
            // walk forward from the first day of the week to the requested
            // weekday, keeping it only if still in the same week..
            cal.set(Calendar.DAY_OF_WEEK, cal.getFirstDayOfWeek());
            while (cal.get(Calendar.DAY_OF_WEEK) != calDay) {
                cal.add(Calendar.DAY_OF_WEEK, 1);
            }
            // final int weekNo = cal.get(Calendar.WEEK_OF_YEAR);
            if (cal.get(Calendar.WEEK_OF_YEAR) == weekNo) {
                days.add(Dates.getInstance(cal.getTime(), type));
                // cal.add(Calendar.DAY_OF_WEEK, Dates.DAYS_PER_WEEK);
            }
        }
        else if (MONTHLY.equals(getFrequency()) || !getMonthList().isEmpty()) {
            final int month = cal.get(Calendar.MONTH);
            // construct a list of possible month days..
            // find the first matching weekday in the month, then step a week
            // at a time while still in the same month..
            cal.set(Calendar.DAY_OF_MONTH, 1);
            while (cal.get(Calendar.DAY_OF_WEEK) != calDay) {
                cal.add(Calendar.DAY_OF_MONTH, 1);
            }
            while (cal.get(Calendar.MONTH) == month) {
                days.add(Dates.getInstance(cal.getTime(), type));
                cal.add(Calendar.DAY_OF_MONTH, Dates.DAYS_PER_WEEK);
            }
        }
        else if (YEARLY.equals(getFrequency())) {
            final int year = cal.get(Calendar.YEAR);
            // construct a list of possible year days..
            // same walk as MONTHLY, but over the whole year..
            cal.set(Calendar.DAY_OF_YEAR, 1);
            while (cal.get(Calendar.DAY_OF_WEEK) != calDay) {
                cal.add(Calendar.DAY_OF_YEAR, 1);
            }
            while (cal.get(Calendar.YEAR) == year) {
                days.add(Dates.getInstance(cal.getTime(), type));
                cal.add(Calendar.DAY_OF_YEAR, Dates.DAYS_PER_WEEK);
            }
        }
        // apply any ordinal prefix (e.g. 2MO / -1FR) from the BYDAY rule..
        return getOffsetDates(days, weekDay.getOffset());
    }

    /**
     * Returns a single-element sublist containing the element of <code>list</code> at <code>offset</code>. Valid
     * offsets are from 1 to the size of the list. If an invalid offset is supplied, an empty list is
     * returned.
* @param list * @param offset * @param sublist */ private List getOffsetDates(final DateList dates, final int offset) { if (offset == 0) { return dates; } final List offsetDates = getDateListInstance(dates); final int size = dates.size(); if (offset < 0 && offset >= -size) { offsetDates.add(dates.get(size + offset)); } else if (offset > 0 && offset <= size) { offsetDates.add(dates.get(offset - 1)); } return offsetDates; } /** * Applies BYHOUR rules specified in this Recur instance to the specified date list. If no BYHOUR rules are * specified the date list is returned unmodified. * @param dates * @return */ private DateList getHourVariants(final DateList dates) { if (getHourList().isEmpty()) { return dates; } final DateList hourlyDates = getDateListInstance(dates); for (final Iterator i = dates.iterator(); i.hasNext();) { final Date date = (Date) i.next(); final Calendar cal = getCalendarInstance(date, true); for (final Iterator j = getHourList().iterator(); j.hasNext();) { final Integer hour = (Integer) j.next(); cal.set(Calendar.HOUR_OF_DAY, hour.intValue()); hourlyDates.add(Dates.getInstance(cal.getTime(), hourlyDates.getType())); } } return hourlyDates; } /** * Applies BYMINUTE rules specified in this Recur instance to the specified date list. If no BYMINUTE rules are * specified the date list is returned unmodified. 
     * @param dates
     * @return
     */
    private DateList getMinuteVariants(final DateList dates) {
        // no BYMINUTE rules - return the list unmodified..
        if (getMinuteList().isEmpty()) {
            return dates;
        }
        final DateList minutelyDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            final Calendar cal = getCalendarInstance(date, true);
            // emit one variant of each date for every minute listed in the rule..
            for (final Iterator j = getMinuteList().iterator(); j.hasNext();) {
                final Integer minute = (Integer) j.next();
                cal.set(Calendar.MINUTE, minute.intValue());
                minutelyDates.add(Dates.getInstance(cal.getTime(), minutelyDates.getType()));
            }
        }
        return minutelyDates;
    }

    /**
     * Applies BYSECOND rules specified in this Recur instance to the specified date list. If no BYSECOND rules are
     * specified the date list is returned unmodified.
     * @param dates
     * @return
     */
    private DateList getSecondVariants(final DateList dates) {
        // no BYSECOND rules - return the list unmodified..
        if (getSecondList().isEmpty()) {
            return dates;
        }
        final DateList secondlyDates = getDateListInstance(dates);
        for (final Iterator i = dates.iterator(); i.hasNext();) {
            final Date date = (Date) i.next();
            final Calendar cal = getCalendarInstance(date, true);
            // emit one variant of each date for every second listed in the rule..
            for (final Iterator j = getSecondList().iterator(); j.hasNext();) {
                final Integer second = (Integer) j.next();
                cal.set(Calendar.SECOND, second.intValue());
                secondlyDates.add(Dates.getInstance(cal.getTime(), secondlyDates.getType()));
            }
        }
        return secondlyDates;
    }

    /**
     * Validates the FREQ rule part and derives the calendar field used by
     * {@link #increment(Calendar)} for this frequency.
     * @throws IllegalArgumentException if FREQ is missing or unrecognised
     */
    private void validateFrequency() {
        if (frequency == null) {
            throw new IllegalArgumentException(
                    "A recurrence rule MUST contain a FREQ rule part.");
        }
        if (SECONDLY.equals(getFrequency())) {
            calIncField = Calendar.SECOND;
        }
        else if (MINUTELY.equals(getFrequency())) {
            calIncField = Calendar.MINUTE;
        }
        else if (HOURLY.equals(getFrequency())) {
            calIncField = Calendar.HOUR_OF_DAY;
        }
        else if (DAILY.equals(getFrequency())) {
            calIncField = Calendar.DAY_OF_YEAR;
        }
        else if (WEEKLY.equals(getFrequency())) {
            calIncField = Calendar.WEEK_OF_YEAR;
        }
        else if (MONTHLY.equals(getFrequency())) {
            calIncField = Calendar.MONTH;
        }
        else if (YEARLY.equals(getFrequency())) {
            calIncField = Calendar.YEAR;
        }
        else {
            throw new IllegalArgumentException("Invalid FREQ rule part '"
                    + frequency + "' in recurrence rule");
        }
    }

    /**
     * @param count The count to set.
     */
    public final void setCount(final int count) {
        // COUNT and UNTIL are mutually exclusive - setting one clears the other..
        this.count = count;
        this.until = null;
    }

    /**
     * @param frequency The frequency to set.
     */
    public final void setFrequency(final String frequency) {
        this.frequency = frequency;
        // re-derive the increment field for the new frequency..
        validateFrequency();
    }

    /**
     * @param interval The interval to set.
     */
    public final void setInterval(final int interval) {
        this.interval = interval;
    }

    /**
     * @param until The until to set.
     */
    public final void setUntil(final Date until) {
        // COUNT and UNTIL are mutually exclusive - setting one clears the other..
        this.until = until;
        this.count = -1;
    }

    /**
     * Construct a Calendar object and sets the time.
     * @param date
     * @param lenient
     * @return
     */
    private Calendar getCalendarInstance(final Date date, final boolean lenient) {
        Calendar cal = Dates.getCalendarInstance(date);
        // A week should have at least 4 days to be considered as such per RFC5545
        cal.setMinimalDaysInFirstWeek(4);
        cal.setFirstDayOfWeek(calendarWeekStartDay);
        cal.setLenient(lenient);
        cal.setTime(date);
        return cal;
    }

    /**
     * Re-initialises the transient logger after deserialization.
     * @param stream
     * @throws IOException
     * @throws ClassNotFoundException
     */
    private void readObject(final java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException {
        stream.defaultReadObject();
        log = LogFactory.getLog(Recur.class);
    }

    /**
     * Instantiate a new datelist with the same type, timezone and utc settings
     * as the origList.
     * @param origList
     * @return a new empty list.
     */
    private static DateList getDateListInstance(final DateList origList) {
        final DateList list = new DateList(origList.getType());
        if (origList.isUtc()) {
            list.setUtc(true);
        }
        else {
            list.setTimeZone(origList.getTimeZone());
        }
        return list;
    }
}
package io.cattle.platform.resource.pool.impl;

import static io.cattle.platform.core.model.tables.ResourcePoolTable.*;

import io.cattle.platform.core.model.ResourcePool;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.util.ObjectUtils;
import io.cattle.platform.resource.pool.PooledResource;
import io.cattle.platform.resource.pool.PooledResourceItemGenerator;
import io.cattle.platform.resource.pool.PooledResourceItemGeneratorFactory;
import io.cattle.platform.resource.pool.PooledResourceOptions;
import io.cattle.platform.resource.pool.ResourcePoolManager;
import io.cattle.platform.util.type.CollectionUtils;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

import org.jooq.exception.DataAccessException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * {@link ResourcePoolManager} implementation that persists each allocated pool
 * item as a {@code ResourcePool} database row keyed by
 * (poolType, poolId, qualifier, ownerType, ownerId).
 */
public class ResourcePoolManagerImpl implements ResourcePoolManager {

    private static final Logger log = LoggerFactory.getLogger(ResourcePoolManagerImpl.class);

    ObjectManager objectManager;
    List<PooledResourceItemGeneratorFactory> factories;

    /**
     * Allocates {@code options.getCount()} items from the pool for the owner.
     * Existing rows for the same (pool, owner, qualifier) are reused first.
     * On shortfall, any partial allocation is released and {@code null} is
     * returned.
     */
    @Override
    public List<PooledResource> allocateResource(Object pool, Object owner, PooledResourceOptions options) {
        String qualifier = options.getQualifier();
        int count = options.getCount();

        String poolType = getResourceType(pool);
        long poolId = getResourceId(pool);
        String ownerType = getResourceType(owner);
        long ownerId = getResourceId(owner);

        Map<Object, Object> keys = CollectionUtils.asMap((Object) RESOURCE_POOL.POOL_TYPE, poolType,
                (Object) RESOURCE_POOL.POOL_ID, poolId,
                RESOURCE_POOL.QUALIFIER, qualifier,
                RESOURCE_POOL.OWNER_TYPE, ownerType,
                RESOURCE_POOL.OWNER_ID, ownerId);

        // start from items already assigned to this owner..
        List<ResourcePool> resourcePools = new ArrayList<ResourcePool>(objectManager.find(ResourcePool.class, keys));

        List<PooledResource> result = new ArrayList<PooledResource>();

        for (ResourcePool resourcePool : resourcePools) {
            result.add(new DefaultPooledResource(resourcePool.getItem()));
        }

        // generate and persist new items until the requested count is reached
        // or the generator is exhausted..
        while (result.size() < count) {
            String item = getItem(keys, pool, qualifier, options.getRequestedItem());
            if (item == null) {
                break;
            } else {
                log.info("Assigning [{}] from pool [{}:{}] to owner [{}:{}]", item, poolType, poolId,
                        ownerType, ownerId);
            }
            result.add(new DefaultPooledResource(item));
        }

        if (result.size() != count) {
            // could not satisfy the request - roll back everything held by
            // this owner for these keys and signal failure with null..
            log.info("Failed to find [{}] items for pool [{}:{}] and owner [{}:{}]", count, poolType, poolId,
                    ownerType, ownerId);
            releaseResource(pool, owner, options);
            return null;
        }

        return result;
    }

    @Override
    public void releaseResource(Object pool, Object owner) {
        releaseResource(pool, owner, new PooledResourceOptions());
    }

    /**
     * Deletes all pool rows held by the owner for the given pool/qualifier,
     * returning the items to the pool.
     */
    @Override
    public void releaseResource(Object pool, Object owner, PooledResourceOptions options) {
        String poolType = getResourceType(pool);
        long poolId = getResourceId(pool);
        String ownerType = getResourceType(owner);
        long ownerId = getResourceId(owner);

        Map<Object, Object> keys = CollectionUtils.asMap((Object) RESOURCE_POOL.POOL_TYPE, poolType,
                (Object) RESOURCE_POOL.POOL_ID, poolId,
                RESOURCE_POOL.QUALIFIER, options.getQualifier(),
                RESOURCE_POOL.OWNER_TYPE, ownerType,
                RESOURCE_POOL.OWNER_ID, ownerId);

        for (ResourcePool resource : objectManager.find(ResourcePool.class, keys)) {
            log.info("Releasing [{}] id [{}] to pool [{}:{}] from owner [{}:{}]", resource.getItem(), resource.getId(),
                    poolType, poolId, ownerType, ownerId);
            objectManager.delete(resource);
        }
    }

    @Override
    public void transferResource(Object pool, Object owner, Object newOwner) {
        transferResource(pool, owner, newOwner, new PooledResourceOptions());
    }

    /**
     * Re-assigns all pool rows held by {@code owner} to {@code newOwner}
     * without releasing the underlying items.
     */
    @Override
    public void transferResource(Object pool, Object owner, Object newOwner, PooledResourceOptions options) {
        String poolType = getResourceType(pool);
        long poolId = getResourceId(pool);
        String ownerType = getResourceType(owner);
        long ownerId = getResourceId(owner);
        String newOwnerType = getResourceType(newOwner);
        long newOwnerId = getResourceId(newOwner);

        Map<Object, Object> keys = CollectionUtils.asMap((Object) RESOURCE_POOL.POOL_TYPE, poolType,
                (Object) RESOURCE_POOL.POOL_ID, poolId,
                RESOURCE_POOL.QUALIFIER, options.getQualifier(),
                RESOURCE_POOL.OWNER_TYPE, ownerType,
                RESOURCE_POOL.OWNER_ID, ownerId);

        for (ResourcePool resource : objectManager.find(ResourcePool.class, keys)) {
            log.info("Transfering [{}] id [{}] from pool [{}:{}] from owner [{}:{}] to owner [{}:{}]",
                    resource.getItem(), resource.getId(),
                    poolType, poolId, ownerType, ownerId, newOwnerType, newOwnerId);
            resource.setOwnerType(newOwnerType);
            resource.setOwnerId(newOwnerId);
            objectManager.persist(resource);
        }
    }

    /**
     * Convenience wrapper around {@link #allocateResource} for a single item.
     * @return the first allocated resource, or null on failure
     */
    @Override
    public PooledResource allocateOneResource(Object pool, Object owner, PooledResourceOptions options) {
        List<PooledResource> resources = allocateResource(pool, owner, options);
        return (resources == null || resources.size() == 0) ? null : resources.get(0);
    }

    /**
     * Asks the first factory that supports this pool/qualifier for candidate
     * items and tries to persist each one until an insert succeeds.
     * @param tryItem a specific item to prefer, if still available in the pool
     * @return the persisted item, or null if the generator is exhausted
     */
    protected String getItem(Map<Object, Object> keys, Object pool, String qualifier, String tryItem) {
        PooledResourceItemGenerator generator = null;

        for (PooledResourceItemGeneratorFactory factory : factories) {
            generator = factory.getGenerator(pool, qualifier);
            if (generator != null) {
                break;
            }
        }

        if (generator == null) {
            log.error("Failed to find generator for pool [{}]", pool);
            return null;
        }

        while (generator.hasNext()) {
            String item = null;
            if (tryItem == null) {
                item = generator.next();
            } else {
                // honour the requested item once, then fall back to generated ones..
                item = generator.isInPool(tryItem) ? tryItem : generator.next();
                tryItem = null;
            }
            Map<Object, Object> newKeys = new HashMap<Object, Object>(keys);
            newKeys.put(RESOURCE_POOL.ITEM, item);

            Map<String, Object> props = objectManager.convertToPropertiesFor(ResourcePool.class, newKeys);

            try {
                return objectManager.create(ResourcePool.class, props).getItem();
            } catch (DataAccessException e) {
                // insert failed - presumably the item was claimed concurrently
                // (unique constraint); try the next candidate. TODO confirm.
                log.debug("Failed to create item [{}]", item);
            }
        }

        return null;
    }

    protected String getResourceType(Object obj) {
        // GLOBAL is a sentinel pseudo-resource with a fixed type/id..
        if (GLOBAL.equals(obj)) {
            return GLOBAL;
        }
        String type = objectManager.getType(obj);
        if (type == null) {
            throw new IllegalStateException("Failed to find resource type for [" + obj + "]");
        }
        return type;
    }

    protected long getResourceId(Object obj) {
        // GLOBAL is a sentinel pseudo-resource with a fixed type/id..
        if (GLOBAL.equals(obj)) {
            return 1;
        }
        Object id = ObjectUtils.getId(obj);
        if (id instanceof Number) {
            return ((Number) id).longValue();
        }
        throw new IllegalStateException("Failed to find resource id for [" + obj + "]");
    }

    public ObjectManager getObjectManager() {
        return objectManager;
    }

    @Inject
    public void setObjectManager(ObjectManager objectManager) {
        this.objectManager = objectManager;
    }

    public List<PooledResourceItemGeneratorFactory> getFactories() {
        return factories;
    }

    @Inject
    public void setFactories(List<PooledResourceItemGeneratorFactory> factories) {
        this.factories = factories;
    }
}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.heuristic.selector.move.composite; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.TreeMap; import com.google.common.collect.Iterators; import org.optaplanner.core.impl.heuristic.move.Move; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionProbabilityWeightFactory; import org.optaplanner.core.impl.heuristic.selector.common.iterator.SelectionIterator; import org.optaplanner.core.impl.heuristic.selector.move.MoveSelector; import org.optaplanner.core.impl.phase.scope.AbstractStepScope; import org.optaplanner.core.impl.score.director.ScoreDirector; import org.optaplanner.core.impl.solver.random.RandomUtils; /** * A {@link CompositeMoveSelector} that unions 2 or more {@link MoveSelector}s. * <p> * For example: a union of {A, B, C} and {X, Y} will result in {A, B, C, X, Y}. * <p> * Warning: there is no duplicated {@link Move} check, so union of {A, B, C} and {B, D} will result in {A, B, C, B, D}. 
 * @see CompositeMoveSelector
 */
public class UnionMoveSelector extends CompositeMoveSelector {

    // Only used (and required) when randomSelection is enabled..
    protected final SelectionProbabilityWeightFactory selectorProbabilityWeightFactory;

    // Valid only between stepStarted and stepEnded..
    protected ScoreDirector scoreDirector;

    public UnionMoveSelector(List<MoveSelector> childMoveSelectorList, boolean randomSelection) {
        this(childMoveSelectorList, randomSelection, null);
    }

    public UnionMoveSelector(List<MoveSelector> childMoveSelectorList, boolean randomSelection,
            SelectionProbabilityWeightFactory selectorProbabilityWeightFactory) {
        super(childMoveSelectorList, randomSelection);
        this.selectorProbabilityWeightFactory = selectorProbabilityWeightFactory;
        // the weight factory and randomSelection must be supplied together..
        if (!randomSelection) {
            if (selectorProbabilityWeightFactory != null) {
                throw new IllegalArgumentException("The selector (" + this
                        + ") with randomSelection (" + randomSelection
                        + ") cannot have a selectorProbabilityWeightFactory (" + selectorProbabilityWeightFactory
                        + ").");
            }
        } else {
            if (selectorProbabilityWeightFactory == null) {
                throw new IllegalArgumentException("The selector (" + this
                        + ") with randomSelection (" + randomSelection
                        + ") requires a selectorProbabilityWeightFactory (" + selectorProbabilityWeightFactory
                        + ").");
            }
        }
    }

    @Override
    public void stepStarted(AbstractStepScope stepScope) {
        scoreDirector = stepScope.getScoreDirector();
        super.stepStarted(stepScope);
    }

    @Override
    public void stepEnded(AbstractStepScope stepScope) {
        super.stepEnded(stepScope);
        scoreDirector = null;
    }

    // ************************************************************************
    // Worker methods
    // ************************************************************************

    @Override
    public boolean isNeverEnding() {
        if (randomSelection) {
            for (MoveSelector moveSelector : childMoveSelectorList) {
                if (moveSelector.isNeverEnding()) {
                    return true;
                }
            }
            // The UnionMoveSelector is special: it can be randomSelection true and still neverEnding false
            return false;
        } else {
            // Only the last childMoveSelector can be neverEnding
            // (earlier neverEnding children would starve their successors)
            if (!childMoveSelectorList.isEmpty()
                    && childMoveSelectorList.get(childMoveSelectorList.size() - 1).isNeverEnding()) {
                return true;
            }
            return false;
        }
    }

    @Override
    public long getSize() {
        long size = 0L;
        for (MoveSelector moveSelector : childMoveSelectorList) {
            size += moveSelector.getSize();
        }
        return size;
    }

    @Override
    public Iterator<Move> iterator() {
        if (!randomSelection) {
            // original order: simply chain the children one after another..
            Iterator<Move> iterator = Collections.emptyIterator();
            for (MoveSelector moveSelector : childMoveSelectorList) {
                iterator = Iterators.concat(iterator, moveSelector.iterator());
            }
            return iterator;
        } else {
            return new RandomUnionMoveIterator();
        }
    }

    /**
     * Picks the next move from a child iterator chosen at random,
     * proportionally to each child's probability weight.
     */
    public class RandomUnionMoveIterator extends SelectionIterator<Move> {

        protected final Map<Iterator<Move>, ProbabilityItem> probabilityItemMap;

        // maps cumulative-weight offset -> child iterator; floorEntry on a
        // random offset selects a child proportionally to its weight..
        protected final NavigableMap<Double, Iterator<Move>> moveIteratorMap;
        protected double probabilityWeightTotal;

        // set when a child iterator is exhausted; forces a rebuild of
        // moveIteratorMap before the next selection..
        protected boolean stale;

        public RandomUnionMoveIterator() {
            probabilityItemMap = new LinkedHashMap<>(childMoveSelectorList.size());
            for (MoveSelector moveSelector : childMoveSelectorList) {
                Iterator<Move> moveIterator = moveSelector.iterator();
                ProbabilityItem probabilityItem = new ProbabilityItem();
                probabilityItem.moveSelector = moveSelector;
                probabilityItem.moveIterator = moveIterator;
                probabilityItem.probabilityWeight = selectorProbabilityWeightFactory
                        .createProbabilityWeight(scoreDirector, moveSelector);
                if (probabilityItem.probabilityWeight < 0.0) {
                    throw new IllegalStateException(
                            "The selectorProbabilityWeightFactory (" + selectorProbabilityWeightFactory
                            + ") returned a negative probabilityWeight (" + probabilityItem.probabilityWeight + ").");
                }
                probabilityItemMap.put(moveIterator, probabilityItem);
            }
            moveIteratorMap = new TreeMap<>();
            stale = true;
        }

        @Override
        public boolean hasNext() {
            if (stale) {
                refreshMoveIteratorMap();
            }
            return !moveIteratorMap.isEmpty();
        }

        @Override
        public Move next() {
            if (stale) {
                refreshMoveIteratorMap();
            }
            double randomOffset = RandomUtils.nextDouble(workingRandom, probabilityWeightTotal);
            Map.Entry<Double, Iterator<Move>> entry = moveIteratorMap.floorEntry(randomOffset);
            // entry is never null because randomOffset < probabilityWeightTotal
            Iterator<Move> moveIterator = entry.getValue();
            Move next = moveIterator.next();
            if (!moveIterator.hasNext()) {
                stale = true;
            }
            return next;
        }

        private void refreshMoveIteratorMap() {
            // rebuild the cumulative-weight index over the children that
            // still have moves and a non-zero weight..
            moveIteratorMap.clear();
            double probabilityWeightOffset = 0.0;
            for (ProbabilityItem probabilityItem : probabilityItemMap.values()) {
                if (probabilityItem.probabilityWeight != 0.0
                        && probabilityItem.moveIterator.hasNext()) {
                    moveIteratorMap.put(probabilityWeightOffset, probabilityItem.moveIterator);
                    probabilityWeightOffset += probabilityItem.probabilityWeight;
                }
            }
            probabilityWeightTotal = probabilityWeightOffset;
        }

    }

    // Plain struct tying a child selector to its iterator and weight..
    private static class ProbabilityItem {

        protected MoveSelector moveSelector;
        protected Iterator<Move> moveIterator;
        protected double probabilityWeight;

    }

    @Override
    public String toString() {
        return "Union(" + childMoveSelectorList + ")";
    }

}
/**
 * Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.kaazing.gateway.transport.http.bridge.filter;

import static java.lang.String.format;
import static org.kaazing.gateway.transport.BridgeSession.REMOTE_ADDRESS;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_FORWARDED;

import java.net.URI;
import java.security.Principal;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;

import javax.security.auth.Subject;

import org.apache.mina.core.session.AttributeKey;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.WriteRequest;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.http.HttpResourceAddress;
import org.kaazing.gateway.security.TypedCallbackHandlerMap;
import org.kaazing.gateway.security.auth.DefaultLoginResult;
import org.kaazing.gateway.security.auth.token.DefaultAuthenticationToken;
import org.kaazing.gateway.transport.http.HttpCookie;
import org.kaazing.gateway.transport.http.HttpProtocol;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.bridge.HttpMessage;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
import org.kaazing.gateway.transport.http.security.auth.token.AuthenticationTokenExtractor;
import org.kaazing.gateway.transport.http.security.auth.token.DefaultAuthenticationTokenExtractor;
import org.kaazing.gateway.util.scheduler.SchedulerProvider;
import org.kaazing.mina.core.session.IoSessionEx;
import org.slf4j.Logger;

/**
 * HTTP bridge filter that applies realm-based security to incoming HTTP
 * requests and manages the session cookie issued on successful login.
 */
public class HttpSubjectSecurityFilter extends HttpLoginSecurityFilter {

    public static final String NAME = HttpProtocol.NAME + "#security";

    public static final String AUTHORIZATION_HEADER = "Authorization";

    public static final String WWW_AUTHENTICATE_HEADER = "WWW-Authenticate";

    /**
     * Prefix to the authentication scheme to indicate that the Kaazing client application will handle the challenge rather than
     * delegate to the browser or the native platform.
     */
    public static final String AUTH_SCHEME_APPLICATION_PREFIX = "Application ";

    public static final String AUTH_SCHEME_BASIC = "Basic";

    public static final String AUTH_SCHEME_NEGOTIATE = "Negotiate";

    // format for the value of the Forwarded header set from the remote TCP host..
    private static final String HEADER_FORWARDED_REMOTE_IP_ADDRESS = "for=%s";

    // session attribute holding the cookie issued at login, consumed by filterWrite..
    static final AttributeKey NEW_SESSION_COOKIE_KEY = new AttributeKey(HttpSubjectSecurityFilter.class, "sessionCookie");

    private final AuthorizationMap authorizationMap;

    private ScheduledExecutorService scheduler;

    public HttpSubjectSecurityFilter() {
        this(null);
    }

    public HttpSubjectSecurityFilter(Logger logger) {
        super(logger);
        // Each filter has its own map. There's only one filter though.
        // Reset the map when the filter is constructed to allow an embedded gateway to repeatedly launch
        // (e.g.
for integration tests) authorizationMap = new AuthorizationMap(); } public void setSchedulerProvider(SchedulerProvider provider) { this.scheduler = provider.getScheduler("loginmodule", false); } // -------------------------------------------------------- // Security code for subject-security LEGACY @Override public void doMessageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception { // GL.debug("http", getClass().getSimpleName() + " request received."); if (! httpRequestMessageReceived(nextFilter, session, message)) return; HttpRequestMessage httpRequest = (HttpRequestMessage) message; final boolean loggerIsEnabled = logger != null && logger.isTraceEnabled(); String forwarded = httpRequest.getHeader(HEADER_FORWARDED); if ((forwarded == null) || (forwarded.length() == 0)) { String remoteIpAddress = null; ResourceAddress resourceAddress = REMOTE_ADDRESS.get(session); ResourceAddress tcpResourceAddress = resourceAddress.findTransport("tcp"); if (tcpResourceAddress != null) { URI resource = tcpResourceAddress.getResource(); remoteIpAddress = resource.getHost(); if (loggerIsEnabled) { logger.trace(format("HttpSubjectSecurityFilter: Remote IP Address: '%s'", remoteIpAddress)); } } if (remoteIpAddress != null) { httpRequest.setHeader(HEADER_FORWARDED, format(HEADER_FORWARDED_REMOTE_IP_ADDRESS, remoteIpAddress)); } } // Make sure we start with the subject from the underlying transport session in case it already has an authenticated subject // (e.g. 
we are httpxe and our transport is http or transport is SSL with a client certificate) if (httpRequest.getSubject() == null) { httpRequest.setSubject( ((IoSessionEx)session).getSubject() ); } ResourceAddress httpAddress = httpRequest.getLocalAddress(); String realmName = httpAddress.getOption(HttpResourceAddress.REALM_NAME); if ( realmName == null ) { setUnprotectedLoginContext(session); if (loggerIsEnabled) { logger.trace("HttpSubjectSecurityFilter skipped because no realm is configured."); } super.doMessageReceived(nextFilter, session, message); return; } securityMessageReceived(nextFilter, session, httpRequest); } protected void writeSessionCookie(IoSession session, HttpRequestMessage httpRequest, DefaultLoginResult loginResult) { // secure requests always have cookie accessible, even // on first access final HttpCookie newSessionCookie = (HttpCookie) loginResult.getLoginAuthorizationAttachment(); httpRequest.addCookie(newSessionCookie); session.setAttribute(NEW_SESSION_COOKIE_KEY, newSessionCookie); if (loggerEnabled()) { logger.trace("Sending session cookie {}", newSessionCookie); } } @Override public void filterWrite(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) throws Exception { // include new session cookie in response Object message = writeRequest.getMessage(); HttpMessage httpMessage = (HttpMessage) message; switch (httpMessage.getKind()) { case RESPONSE: HttpResponseMessage httpResponse = (HttpResponseMessage) httpMessage; HttpCookie sessionCookie = (HttpCookie) session.removeAttribute(NEW_SESSION_COOKIE_KEY); if (sessionCookie != null) { httpResponse.addCookie(sessionCookie); } break; default: break; } super.filterWrite(nextFilter, session, writeRequest); } @Override public void exceptionCaught(NextFilter nextFilter, IoSession session, Throwable cause) throws Exception { if (loggerEnabled()) { logger.trace("Caught exception.", cause); } super.exceptionCaught(nextFilter, session, cause); } @Override public void destroy() throws 
Exception { super.destroy(); } /** * <strong>For testing only</strong> * * Allows for the authorizationMap to be accessed from unit tests. * */ AuthorizationMap getAuthorizationMap() { return authorizationMap; } /** * Captures the notion of a Subject object being valid for a certain time (e.g. inactivity-timeout). */ public static class TimedCredential { private Subject subject; private Long expirationTimestamp; public TimedCredential(Subject subject, Long expirationTimestamp) { if (subject == null) { throw new IllegalArgumentException("subject was null"); } this.subject = subject; this.expirationTimestamp = expirationTimestamp; } public Subject getSubject() { return subject; } public boolean hasExpirationTimestamp() { return expirationTimestamp != null; } public Long getExpirationTimestamp() { return expirationTimestamp; } public void setExpirationTimestamp(Long expirationTimestamp) { this.expirationTimestamp = expirationTimestamp; } public String toString() { StringBuilder sb = new StringBuilder(); sb.append("[TimedCredential: Subject("); for ( Principal p: subject.getPrincipals()) { sb.append(p.getName()).append('/'); } if ( subject.getPrincipals().size()>0) { sb.deleteCharAt(sb.length()-1); } sb.append(") "); if ( expirationTimestamp != null ) { String expires = new SimpleDateFormat("yyyyMMdd HH:mm:ss").format(new Date(expirationTimestamp*1000L)); sb.append("; expires on ").append(expires); } sb.append(" ]"); return sb.toString(); } } /** * Maintain a mapping of authorization key strings, to which subject they correspond * and for how long the mapping is valid for read. * <p/> * In addition, establish a reverse mapping from Subject to authorization key. * <p/> * Combined, this allows one to lookup, validation expiration by authorization key, and also to * clear the authentication map by Subject as well. 
*/ public static class AuthorizationMap { private Map<String, TimedCredential> keyToTimedCredentialMap = new ConcurrentHashMap<>(); private Map<Subject, String> subjectToKeyMap = new ConcurrentHashMap<>(); // For testing TimedCredential get(String key) { return keyToTimedCredentialMap.get(key); } public TimedCredential get(String realmName, String key) { return keyToTimedCredentialMap.get(realmName + key); } public synchronized void put(String realmName, String key, TimedCredential value) { keyToTimedCredentialMap.put(realmName + key, value); subjectToKeyMap.put(value.subject, realmName + key); } public synchronized TimedCredential removeByKey(String realmName, String key) { TimedCredential removedValue = keyToTimedCredentialMap.remove(realmName + key); if (removedValue != null && removedValue.subject != null) { subjectToKeyMap.remove(removedValue.subject); } return removedValue; } public synchronized String removeBySubject(Subject subject) { String removedKey = subjectToKeyMap.remove(subject); if (removedKey != null) { keyToTimedCredentialMap.remove(removedKey); } return removedKey; } public boolean containsKey(String key) { return keyToTimedCredentialMap.containsKey(key); } public boolean containsSubject(Subject subject) { return subjectToKeyMap.containsKey(subject); } public String getKey(Subject subject) { return subjectToKeyMap.get(subject); } public int size() { return keyToTimedCredentialMap.size(); } } // -------------------------------------------------------- // Security code for subject-security going forward void securityMessageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception { final boolean loggerIsEnabled = logger != null && logger.isTraceEnabled(); if (! 
httpRequestMessageReceived(nextFilter, session, message)) return; HttpRequestMessage httpRequest = (HttpRequestMessage) message; ResourceAddress httpAddress = httpRequest.getLocalAddress(); String realmName = httpAddress.getOption(HttpResourceAddress.REALM_NAME); String realmAuthorizationMode = httpAddress.getOption(HttpResourceAddress.REALM_AUTHORIZATION_MODE); String realmChallengeScheme = httpAddress.getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME); if ( alreadyLoggedIn(session, httpAddress)) { // KG-3232, KG-3267: we should never leave the login context unset // for unprotected services. if (LOGIN_CONTEXT_KEY.get(session) == null) { setUnprotectedLoginContext(session); } if (loggerIsEnabled) { logger.trace("HttpSubjectSecurityFilter skipped because we are already allowed or logged in."); } super.doMessageReceived(nextFilter, session, message); return; } if ( realmName == null ) { setUnprotectedLoginContext(session); if (loggerIsEnabled) { logger.trace("HttpSecurityStandardFilter skipped because no realm is configured."); } super.doMessageReceived(nextFilter, session, message); return; } AuthenticationTokenExtractor tokenExtractor = DefaultAuthenticationTokenExtractor.INSTANCE; // // Login using the token; if login fails, the appropriate reply has already been sent from this filter // so stop the filter chain here. // DefaultAuthenticationToken authToken = (DefaultAuthenticationToken) tokenExtractor.extract(httpRequest); // If the client request provided authentication data which has // a challenge scheme, make sure that the client-sent challenge // scheme matches what we expect. If not, it's a badly formatted // request, and the client should be informed of this. 
String clientChallengeScheme = authToken.getScheme(); String expectedChallengeScheme = getBaseAuthScheme(realmChallengeScheme); if (clientChallengeScheme != null && clientChallengeScheme.equals(expectedChallengeScheme) == false) { if (loggerEnabled()) { logger.trace(String.format("A websocket request used the '%s' challenge scheme when we expected the '%s' challenge scheme", clientChallengeScheme, expectedChallengeScheme)); } String reason = String.format("Expected challenge scheme '%s' not found", expectedChallengeScheme); writeResponse(HttpStatus.CLIENT_BAD_REQUEST, reason, nextFilter, session, httpRequest); return; } // Now set the expected challenge scheme on the AuthToken. If the // client provided a scheme, the above check ensures that the // provided scheme matches our expected scheme, so calling setScheme() // does not harm anything. If the client did NOT provide a scheme, // this properly sets one, for the benefit of login modules which // check for such things. authToken.setScheme(expectedChallengeScheme); // Suspend incoming events into this filter. 
Will resume after LoginContext.login() completion suspendIncoming(session); // Schedule LoginContext.login() execution using a separate thread LoginContextTask loginContextTask = new LoginContextTask(nextFilter, session, httpRequest, authToken, null); scheduler.execute(loginContextTask); } // Task for running LoginContext.login() in a separate thread(other than I/O thread) private final class LoginContextTask implements Runnable { private final NextFilter nextFilter; private final IoSession session; private final HttpRequestMessage httpRequest; private final DefaultAuthenticationToken authToken; private final TypedCallbackHandlerMap additionalCallbacks; private final long createdTime; LoginContextTask(NextFilter nextFilter, IoSession session, HttpRequestMessage httpRequest, DefaultAuthenticationToken authToken, TypedCallbackHandlerMap additionalCallbacks) { this.nextFilter = nextFilter; this.session = session; this.httpRequest = httpRequest; this.authToken = authToken; this.additionalCallbacks = additionalCallbacks; this.createdTime = System.currentTimeMillis(); } @Override public void run() { if (loggerEnabled()) { logger.trace("Executing login task %d ms after scheduling for session %s", (System.currentTimeMillis() - createdTime) , session); } boolean succeeded = login(nextFilter, session, httpRequest, authToken, additionalCallbacks); try { if (succeeded) { // Complete the rest of the filter chain HttpSubjectSecurityFilter.super.doMessageReceived(nextFilter, session, httpRequest); } // If there are any events buffered during suspension, resume them HttpSubjectSecurityFilter.super.resumeIncoming(session); } catch (Exception e) { session.getFilterChain().fireExceptionCaught(e); } if (loggerEnabled()) { logger.trace("Finished login task after %d ms for session %s", (System.currentTimeMillis() - createdTime), session); } } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.controller.internal; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.expectLastCall; import static org.junit.Assert.assertNotNull; import java.io.File; import java.net.URL; import java.net.URLClassLoader; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import javax.persistence.EntityManager; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.ActionDBAccessor; import org.apache.ambari.server.actionmanager.ActionManager; import org.apache.ambari.server.actionmanager.HostRoleCommandFactory; import org.apache.ambari.server.actionmanager.RequestFactory; import org.apache.ambari.server.actionmanager.StageFactory; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.audit.AuditLogger; import org.apache.ambari.server.checks.UpgradeCheckRegistry; import org.apache.ambari.server.checks.UpgradeCheckRegistryProvider; import org.apache.ambari.server.configuration.AmbariServerConfiguration; import 
org.apache.ambari.server.controller.AbstractRootServiceResponseFactory; import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.KerberosHelper; import org.apache.ambari.server.controller.KerberosHelperImpl; import org.apache.ambari.server.controller.spi.Predicate; import org.apache.ambari.server.controller.spi.Request; import org.apache.ambari.server.controller.spi.Resource; import org.apache.ambari.server.controller.spi.ResourceProvider; import org.apache.ambari.server.controller.utilities.PredicateBuilder; import org.apache.ambari.server.controller.utilities.PropertyHelper; import org.apache.ambari.server.events.AgentConfigsUpdateEvent; import org.apache.ambari.server.hooks.HookContextFactory; import org.apache.ambari.server.hooks.HookService; import org.apache.ambari.server.ldap.service.AmbariLdapConfigurationProvider; import org.apache.ambari.server.metadata.RoleCommandOrderProvider; import org.apache.ambari.server.mpack.MpackManagerFactory; import org.apache.ambari.server.orm.dao.AlertDefinitionDAO; import org.apache.ambari.server.orm.dao.AlertsDAO; import org.apache.ambari.server.orm.dao.HostRoleCommandDAO; import org.apache.ambari.server.orm.dao.RepositoryVersionDAO; import org.apache.ambari.server.orm.entities.RepositoryVersionEntity; import org.apache.ambari.server.sample.checks.SampleServiceCheck; import org.apache.ambari.server.scheduler.ExecutionScheduler; import org.apache.ambari.server.security.encryption.CredentialStoreService; import org.apache.ambari.server.security.encryption.Encryptor; import org.apache.ambari.server.serveraction.kerberos.KerberosConfigDataFileWriterFactory; import org.apache.ambari.server.serveraction.kerberos.KerberosIdentityDataFileWriterFactory; import org.apache.ambari.server.stack.StackManagerFactory; import org.apache.ambari.server.stack.upgrade.Direction; import org.apache.ambari.server.stack.upgrade.UpgradePack; import 
org.apache.ambari.server.stack.upgrade.UpgradePack.PrerequisiteCheckConfig; import org.apache.ambari.server.stack.upgrade.orchestrate.UpgradeHelper; import org.apache.ambari.server.stageplanner.RoleGraphFactory; import org.apache.ambari.server.state.CheckHelper; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.ConfigFactory; import org.apache.ambari.server.state.ConfigHelper; import org.apache.ambari.server.state.DesiredConfig; import org.apache.ambari.server.state.Service; import org.apache.ambari.server.state.ServiceComponentFactory; import org.apache.ambari.server.state.ServiceComponentHost; import org.apache.ambari.server.state.ServiceComponentHostFactory; import org.apache.ambari.server.state.ServiceInfo; import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.StackInfo; import org.apache.ambari.server.state.configgroup.ConfigGroupFactory; import org.apache.ambari.server.state.scheduler.RequestExecutionFactory; import org.apache.ambari.server.state.stack.OsFamily; import org.apache.ambari.server.topology.PersistedState; import org.apache.ambari.server.topology.TopologyManager; import org.apache.ambari.server.topology.tasks.ConfigureClusterTaskFactory; import org.apache.ambari.spi.ClusterInformation; import org.apache.ambari.spi.RepositoryType; import org.apache.ambari.spi.upgrade.UpgradeCheckStatus; import org.apache.ambari.spi.upgrade.UpgradeCheckType; import org.apache.ambari.spi.upgrade.UpgradeType; import org.apache.commons.lang3.StringUtils; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Matchers; import org.powermock.api.easymock.PowerMock; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import 
org.powermock.modules.junit4.PowerMockRunner;
import org.reflections.Configuration;
import org.reflections.Reflections;
import org.reflections.util.ConfigurationBuilder;
import org.springframework.security.crypto.password.PasswordEncoder;

import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provider;
import com.google.inject.TypeLiteral;
import com.google.inject.name.Names;

/**
 * PreUpgradeCheckResourceProvider tests.
 */
@RunWith(PowerMockRunner.class)
@PrepareForTest({ UpgradeCheckRegistry.class })
public class PreUpgradeCheckResourceProviderTest extends EasyMockSupport {

  // Fully qualified name of the sample check loaded through the stack's library classloader.
  private static final String TEST_SERVICE_CHECK_CLASS_NAME = "org.apache.ambari.server.sample.checks.SampleServiceCheck";
  private static final String CLUSTER_NAME = "Cluster100";

  /**
   * Runs the provider's getResources() against a fully mocked cluster/stack and verifies that
   * the expected number of checks is returned and that the stack-provided SAMPLE_SERVICE_CHECK
   * ran with the expected status, reason, type, cluster name and upgrade type.
   */
  @Test
  @SuppressWarnings({ "rawtypes", "unchecked" })
  public void testGetResources() throws Exception{
    Injector injector = createInjector();

    // Mocks for the controller, cluster topology and target repository version.
    AmbariManagementController managementController = injector.getInstance(AmbariManagementController.class);
    Clusters clusters = injector.getInstance(Clusters.class);
    UpgradeHelper upgradeHelper = injector.getInstance(UpgradeHelper.class);
    RepositoryVersionDAO repoDao = injector.getInstance(RepositoryVersionDAO.class);

    RepositoryVersionEntity repo = createNiceMock(RepositoryVersionEntity.class);
    UpgradePack upgradePack = createNiceMock(UpgradePack.class);
    PrerequisiteCheckConfig config = createNiceMock(PrerequisiteCheckConfig.class);
    Cluster cluster = createNiceMock(Cluster.class);
    Service service = createNiceMock(Service.class);
    ServiceInfo serviceInfo = createNiceMock(ServiceInfo.class);
    ClusterInformation clusterInformation = createNiceMock(ClusterInformation.class);

    expect(service.getDesiredRepositoryVersion()).andReturn(repo).atLeastOnce();

    StackId currentStackId = createNiceMock(StackId.class);
    StackId targetStackId = createNiceMock(StackId.class);

    AmbariMetaInfo ambariMetaInfo = injector.getInstance(AmbariMetaInfo.class);
    ambariMetaInfo.init();
    expectLastCall().anyTimes();

    // One desired config ("config-type" @ "config-tag-1") resolvable on the cluster.
    Config actualConfig = createNiceMock(Config.class);
    DesiredConfig desiredConfig = createNiceMock(DesiredConfig.class);
    Map<String, DesiredConfig> configMap = Maps.newHashMap();
    configMap.put("config-type", desiredConfig);

    expect(desiredConfig.getTag()).andReturn("config-tag-1").atLeastOnce();
    expect(cluster.getDesiredConfigs()).andReturn(configMap).atLeastOnce();
    expect(cluster.getConfig("config-type", "config-tag-1")).andReturn(actualConfig).atLeastOnce();
    expect(cluster.buildClusterInformation()).andReturn(clusterInformation).anyTimes();
    expect(clusterInformation.getClusterName()).andReturn(CLUSTER_NAME).anyTimes();

    // A single service ("Service100") with one component host.
    Map<String, Service> allServiceMap = new HashMap<>();
    allServiceMap.put("Service100", service);

    Map<String, ServiceInfo> allServiceInfoMap = new HashMap<>();
    allServiceInfoMap.put("Service100", serviceInfo);

    ServiceComponentHost serviceComponentHost = createNiceMock(ServiceComponentHost.class);
    expect(serviceComponentHost.getServiceName()).andReturn("Service100").atLeastOnce();
    expect(serviceComponentHost.getServiceComponentName()).andReturn("Component100").atLeastOnce();
    expect(serviceComponentHost.getHostName()).andReturn("c6401.ambari.apache.org").atLeastOnce();

    List<ServiceComponentHost> serviceComponentHosts = Lists.newArrayList();
    serviceComponentHosts.add(serviceComponentHost);

    expect(cluster.getServiceComponentHosts()).andReturn(serviceComponentHosts).atLeastOnce();

    // set expectations
    expect(managementController.getClusters()).andReturn(clusters).anyTimes();
    expect(managementController.getAmbariMetaInfo()).andReturn(ambariMetaInfo).anyTimes();

    expect(clusters.getCluster(CLUSTER_NAME)).andReturn(cluster).anyTimes();
    expect(cluster.getClusterName()).andReturn(CLUSTER_NAME).atLeastOnce();
    expect(cluster.getServices()).andReturn(allServiceMap).anyTimes();
    expect(cluster.getService("Service100")).andReturn(service).anyTimes();
    expect(cluster.getCurrentStackVersion()).andReturn(currentStackId).anyTimes();

    // Upgrading Stack100 from 1.0 to 1.1.
    expect(currentStackId.getStackName()).andReturn("Stack100").anyTimes();
    expect(currentStackId.getStackVersion()).andReturn("1.0").anyTimes();
    expect(targetStackId.getStackId()).andReturn("Stack100-1.1").anyTimes();
    expect(targetStackId.getStackName()).andReturn("Stack100").anyTimes();
    expect(targetStackId.getStackVersion()).andReturn("1.1").anyTimes();

    expect(repoDao.findByPK(1L)).andReturn(repo).anyTimes();
    expect(repo.getStackId()).andReturn(targetStackId).atLeastOnce();
    expect(repo.getId()).andReturn(1L).atLeastOnce();
    expect(repo.getType()).andReturn(RepositoryType.STANDARD).atLeastOnce();
    expect(repo.getVersion()).andReturn("1.1.0.0").atLeastOnce();

    expect(upgradeHelper.suggestUpgradePack(CLUSTER_NAME, currentStackId, targetStackId, Direction.UPGRADE, UpgradeType.NON_ROLLING, "upgrade_pack11")).andReturn(upgradePack);

    // The upgrade pack declares the sample check class as a prerequisite check.
    List<String> prerequisiteChecks = new LinkedList<>();
    prerequisiteChecks.add(TEST_SERVICE_CHECK_CLASS_NAME);

    expect(upgradePack.getPrerequisiteCheckConfig()).andReturn(config);
    expect(upgradePack.getPrerequisiteChecks()).andReturn(prerequisiteChecks).anyTimes();
    expect(upgradePack.getTarget()).andReturn("1.1.*.*").anyTimes();
    expect(upgradePack.getOwnerStackId()).andReturn(targetStackId).atLeastOnce();
    expect(upgradePack.getType()).andReturn(UpgradeType.ROLLING).atLeastOnce();

    expect(ambariMetaInfo.getServices("Stack100", "1.0")).andReturn(allServiceInfoMap).anyTimes();

    // Point the service's checks folder at the test resources "checks" directory.
    String checks = ClassLoader.getSystemClassLoader().getResource("checks").getPath();
    expect(serviceInfo.getChecksFolder()).andReturn(new File(checks));

    // Library classloader from which the stack-provided check instance is materialized.
    URL url = new URL("file://foo");
    URLClassLoader classLoader = createNiceMock(URLClassLoader.class);
    expect(classLoader.getURLs()).andReturn(new URL[] { url }).once();

    StackInfo stackInfo = createNiceMock(StackInfo.class);
    expect(ambariMetaInfo.getStack(targetStackId)).andReturn(stackInfo).atLeastOnce();
    expect(stackInfo.getLibraryClassLoader()).andReturn(classLoader).atLeastOnce();
    expect(stackInfo.getLibraryInstance(EasyMock.anyObject(), EasyMock.eq(TEST_SERVICE_CHECK_CLASS_NAME)))
        .andReturn(new SampleServiceCheck()).atLeastOnce();

    // mock out plugin check loading
    Reflections reflectionsMock = createNiceMock(Reflections.class);
    PowerMockito.whenNew(Reflections.class).withParameterTypes(
        Configuration.class).withArguments(Matchers.any(ConfigurationBuilder.class)).thenReturn(
            reflectionsMock);

    PowerMock.replay(Reflections.class);

    // replay
    replayAll();

    ResourceProvider provider = getPreUpgradeCheckResourceProvider(managementController, injector);

    // create the request
    Request request = PropertyHelper.getReadRequest(new HashSet<>());
    PredicateBuilder builder = new PredicateBuilder();
    Predicate predicate = builder.property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID).equals(CLUSTER_NAME).and()
        .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_PACK_PROPERTY_ID).equals("upgrade_pack11").and()
        .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_TYPE_PROPERTY_ID).equals(UpgradeType.NON_ROLLING).and()
        .property(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_TARGET_REPOSITORY_VERSION_ID_ID).equals("1").toPredicate();

    Set<Resource> resources = Collections.emptySet();
    resources = provider.getResources(request, predicate);

    // make sure all of the checks ran and were returned in the response; some
    // of the checks are stripped out b/c they don't define any required upgrade
    // types. The value being asserted here is a combination of built-in checks
    // which are required for the upgrade type as well as any provided checks
    // discovered in the stack
    Assert.assertEquals(20, resources.size());

    // find the service check provided by the library classloader and verify it ran
    Resource customUpgradeCheck = null;
    for (Resource resource : resources) {
      String id = (String) resource.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_ID_PROPERTY_ID);
      if (StringUtils.equals(id, "SAMPLE_SERVICE_CHECK")) {
        customUpgradeCheck = resource;
        break;
      }
    }

    assertNotNull(customUpgradeCheck);

    String description = (String) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CHECK_PROPERTY_ID);
    Assert.assertEquals("Sample service check description.", description);

    UpgradeCheckStatus status = (UpgradeCheckStatus) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_STATUS_PROPERTY_ID);
    Assert.assertEquals(UpgradeCheckStatus.FAIL, status);

    String reason = (String) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_REASON_PROPERTY_ID);
    Assert.assertEquals("Sample service check always fails.", reason);

    UpgradeCheckType checkType = (UpgradeCheckType) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CHECK_TYPE_PROPERTY_ID);
    Assert.assertEquals(UpgradeCheckType.HOST, checkType);

    String clusterName = (String) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_CLUSTER_NAME_PROPERTY_ID);
    Assert.assertEquals(CLUSTER_NAME, clusterName);

    UpgradeType upgradeType = (UpgradeType) customUpgradeCheck.getPropertyValue(PreUpgradeCheckResourceProvider.UPGRADE_CHECK_UPGRADE_TYPE_PROPERTY_ID);
    Assert.assertEquals(UpgradeType.NON_ROLLING, upgradeType);

    PowerMock.verifyAll();
  }

  /**
   * This factory method creates PreUpgradeCheckResourceProvider using the mock managementController
   */
  public PreUpgradeCheckResourceProvider getPreUpgradeCheckResourceProvider(AmbariManagementController managementController, Injector injector) throws AmbariException {
    //UpgradeHelper upgradeHelper = injector.getInstance(UpgradeHelper.class);
    //injector.injectMembers(upgradeHelper);
    PreUpgradeCheckResourceProvider provider = new PreUpgradeCheckResourceProvider(managementController);
    return provider;
  }

  // Builds a Guice injector whose bindings are all nice mocks except CheckHelper,
  // plus the real UpgradeCheckRegistryProvider under test.
  private Injector createInjector() throws Exception {
    return Guice.createInjector(new AbstractModule() {
      @Override
      @SuppressWarnings("unchecked")
      protected void configure() {
        Clusters clusters = createNiceMock(Clusters.class);
        Provider<Clusters> clusterProvider = () -> clusters;

        UpgradeHelper upgradeHelper = createNiceMock(UpgradeHelper.class);
        Provider<UpgradeHelper> upgradeHelperProvider = () -> upgradeHelper;

        CheckHelper checkHelper = new CheckHelper();

        bind(CheckHelper.class).toInstance(checkHelper);
        bind(Clusters.class).toProvider(clusterProvider);
        bind(UpgradeCheckRegistry.class).toProvider(UpgradeCheckRegistryProvider.class);
        bind(UpgradeHelper.class).toProvider(upgradeHelperProvider);
        bind(KerberosHelper.class).to(KerberosHelperImpl.class);
        bind(KerberosIdentityDataFileWriterFactory.class).toInstance(createNiceMock(KerberosIdentityDataFileWriterFactory.class));
        bind(KerberosConfigDataFileWriterFactory.class).toInstance(createNiceMock(KerberosConfigDataFileWriterFactory.class));
        bind(AuditLogger.class).toInstance(createNiceMock(AuditLogger.class));
        bind(ConfigHelper.class).toInstance(createNiceMock(ConfigHelper.class));
        bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class));
        bind(ActionManager.class).toInstance(createNiceMock(ActionManager.class));
        bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class));
        bind(ExecutionScheduler.class).toInstance(createNiceMock(ExecutionScheduler.class));
        bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class));
        bind(ActionDBAccessor.class).toInstance(createNiceMock(ActionDBAccessor.class));
        bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class));
        bind(ConfigFactory.class).toInstance(createNiceMock(ConfigFactory.class));
        bind(ConfigGroupFactory.class).toInstance(createNiceMock(ConfigGroupFactory.class));
        bind(CredentialStoreService.class).toInstance(createNiceMock(CredentialStoreService.class));
        bind(RequestExecutionFactory.class).toInstance(createNiceMock(RequestExecutionFactory.class));
        bind(RequestFactory.class).toInstance(createNiceMock(RequestFactory.class));
        bind(RoleCommandOrderProvider.class).toInstance(createNiceMock(RoleCommandOrderProvider.class));
        bind(RoleGraphFactory.class).toInstance(createNiceMock(RoleGraphFactory.class));
        bind(AbstractRootServiceResponseFactory.class).toInstance(createNiceMock(AbstractRootServiceResponseFactory.class));
        bind(ServiceComponentFactory.class).toInstance(createNiceMock(ServiceComponentFactory.class));
        bind(ServiceComponentHostFactory.class).toInstance(createNiceMock(ServiceComponentHostFactory.class));
        bind(StageFactory.class).toInstance(createNiceMock(StageFactory.class));
        bind(HostRoleCommandFactory.class).toInstance(createNiceMock(HostRoleCommandFactory.class));
        bind(HookContextFactory.class).toInstance(createNiceMock(HookContextFactory.class));
        bind(HookService.class).toInstance(createNiceMock(HookService.class));
        bind(PasswordEncoder.class).toInstance(createNiceMock(PasswordEncoder.class));
        bind(PersistedState.class).toInstance(createNiceMock(PersistedState.class));
        bind(ConfigureClusterTaskFactory.class).toInstance(createNiceMock(ConfigureClusterTaskFactory.class));
        bind(TopologyManager.class).toInstance(createNiceMock(TopologyManager.class));
        bind(AmbariMetaInfo.class).toInstance(createNiceMock(AmbariMetaInfo.class));
        bind(AlertsDAO.class).toInstance(createNiceMock(AlertsDAO.class));
        bind(AlertDefinitionDAO.class).toInstance(createNiceMock(AlertDefinitionDAO.class));
        bind(RepositoryVersionDAO.class).toInstance(createNiceMock(RepositoryVersionDAO.class));
        bind(MpackManagerFactory.class).toInstance(createNiceMock(MpackManagerFactory.class));

        Provider<EntityManager> entityManagerProvider = createNiceMock(Provider.class);
        bind(EntityManager.class).toProvider(entityManagerProvider);
        bind(new TypeLiteral<Encryptor<AgentConfigsUpdateEvent>>() {}).annotatedWith(Names.named("AgentConfigEncryptor")).toInstance(Encryptor.NONE);
        bind(new TypeLiteral<Encryptor<AmbariServerConfiguration>>() {}).annotatedWith(Names.named("AmbariServerConfigurationEncryptor")).toInstance(Encryptor.NONE);
        bind(AmbariLdapConfigurationProvider.class).toInstance(createMock(AmbariLdapConfigurationProvider.class));
        requestStaticInjection(PreUpgradeCheckResourceProvider.class);
      }
    });
  }
}
/*
 * Copyright 2022 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.thoughtworks.go.util.command;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.stream.Stream;

import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.junit.jupiter.api.TestInstance.Lifecycle.PER_CLASS;

/**
 * Tests for {@link UrlArgument}: construction, password masking for display,
 * credential stripping, secret replacement in console output, equality based
 * on the raw URL, and URL/local-path validation.
 */
class UrlArgumentTest {
    private static final String URL_WITH_PASSWORD = "http://username:password@somehere";
    private CommandArgument argument;

    @BeforeEach
    void setup() {
        argument = new UrlArgument(URL_WITH_PASSWORD);
    }

    @Test
    void shouldBeTypeOfCommandArgument() {
        assertThat(new UrlArgument("foo")).isInstanceOf(CommandArgument.class);
    }

    @Test
    void shouldErrorOutIfGivenUrlIsNull() {
        assertThatCode(() -> new UrlArgument(null))
                .isInstanceOf(RuntimeException.class)
                .hasMessage("Url cannot be null.");
    }

    @Nested
    class originalArgument {
        @Test
        void shouldReturnGivenUrlAsItIs() {
            assertThat(argument.originalArgument()).isEqualTo(URL_WITH_PASSWORD);
        }
    }

    @Nested
    class forCommandLine {
        @Test
        void shouldReturnGivenUrlAsItIs() {
            final UrlArgument url = new UrlArgument("https://username:password@something/foo");

            assertThat(url.forCommandLine()).isEqualTo("https://username:password@something/foo");
        }
    }

    @Nested
    @TestInstance(PER_CLASS)
    class forDisplay {
        Stream<Arguments> urls() {
            return Stream.of(
                    Arguments.of("", ""),
                    Arguments.of("http://username:password@somehere", "http://username:******@somehere"),
                    Arguments.of("http://username@somehere", "http://******@somehere"),
                    Arguments.of("http://:@somehere", "http://:******@somehere"),
                    Arguments.of("http://:password@somehere", "http://:******@somehere"),
                    Arguments.of("http://username:@somehere", "http://username:******@somehere"),
                    Arguments.of("http://something/somewhere", "http://something/somewhere")
            );
        }

        @ParameterizedTest
        @MethodSource("urls")
        void shouldMaskPasswordInGivenUrl(String input, String expectedMaskedUrl) {
            final UrlArgument simpleUrlArgument = new UrlArgument(input);
            assertThat(simpleUrlArgument.forDisplay()).isEqualTo(expectedMaskedUrl);
        }

        @Test
        void shouldNotMaskWithJustUserForSvnSshProtocol() {
            String normal = "svn+ssh://user@10.18.7.51:8153";
            UrlArgument url = new UrlArgument(normal);
            assertThat(url.forDisplay()).isEqualTo("svn+ssh://user@10.18.7.51:8153");
        }

        @Test
        void shouldNotMaskWithJustUserForSshProtocol() {
            String normal = "ssh://user@10.18.7.51:8153";
            UrlArgument url = new UrlArgument(normal);
            assertThat(url.forDisplay()).isEqualTo("ssh://user@10.18.7.51:8153");
        }

        @Test
        void shouldMaskWithUsernameAndPasswordForSshProtocol() {
            String normal = "ssh://user:password@10.18.7.51:8153";
            UrlArgument url = new UrlArgument(normal);
            assertThat(url.forDisplay()).isEqualTo("ssh://user:******@10.18.7.51:8153");
        }

        //BUG #5471
        @Test
        void shouldMaskAuthTokenInUrl() {
            UrlArgument url = new UrlArgument("https://9bf58jhrb32f29ad0c3983a65g594f1464jgf9a3@somewhere");
            assertThat(url.forDisplay()).isEqualTo("https://******@somewhere");
        }
    }

    @Nested
    class toString {
        @Test
        void shouldReturnValueForToString() {
            assertThat(argument.toString()).isEqualTo("http://username:******@somehere");
        }

        @Test
        void shouldNotChangeNormalURL() {
            String normal = "http://normal/foo/bar/baz?a=b&c=d#fragment";
            UrlArgument url = new UrlArgument(normal);
            assertThat(url.toString()).isEqualTo(normal);
        }

        @Test
        void shouldWorkWithSvnSshUrl() {
            String normal = "svn+ssh://user:password@10.18.7.51:8153";
            UrlArgument url = new UrlArgument(normal);
            assertThat(url.toString()).isEqualTo("svn+ssh://user:******@10.18.7.51:8153");
        }

        @Test
        void shouldIgnoreArgumentsThatAreNotRecognisedUrls() {
            String notAUrl = "C:\\foo\\bar\\baz";
            UrlArgument url = new UrlArgument(notAUrl);
            assertThat(url.toString()).isEqualTo(notAUrl);
        }
    }

    @Nested
    class Equals {
        @Test
        void shouldBeEqualBasedOnRawUrl() {
            UrlArgument url1 = new UrlArgument("svn+ssh://user:password@10.18.7.51:8153");
            UrlArgument url3 = new UrlArgument("svn+ssh://user:password@10.18.7.51:8153");
            assertThat(url1).isEqualTo(url3);
        }

        @Test
        void shouldNotBeEqualWhenRawUrlsDiffer() {
            // Replaces a test that was an exact duplicate of
            // shouldBeEqualBasedOnRawUrl: cover the inequality side for
            // svn+ssh URLs, mirroring the existing http case.
            UrlArgument url1 = new UrlArgument("svn+ssh://user:password@10.18.7.51:8153");
            UrlArgument url2 = new UrlArgument("svn+ssh://user:other@10.18.7.51:8153");
            assertThat(url1).isNotEqualTo(url2);
        }

        @Test
        void shouldBeEqualBasedOnRawUrlForHttpUrls() {
            UrlArgument url1 = new UrlArgument("http://user:password@10.18.7.51:8153");
            UrlArgument url2 = new UrlArgument("http://user:other@10.18.7.51:8153");
            UrlArgument url3 = new UrlArgument("http://user:password@10.18.7.51:8153");
            assertThat(url1).isEqualTo(url3);
            assertThat(url1).isNotEqualTo(url2);
        }

        @Test
        void shouldIgnoreTrailingSlashesOnURIs() {
            UrlArgument url1 = new UrlArgument("file:///not-exist/svn/trunk/");
            UrlArgument url2 = new UrlArgument("file:///not-exist/svn/trunk");
            assertThat(url1).isEqualTo(url2);
        }
    }

    @Nested
    class withoutCredentials {
        @Test
        void shouldRemoveCredentials() {
            final UrlArgument url = new UrlArgument("https://username:password@something/foo");

            assertThat(url.withoutCredentials()).isEqualTo("https://something/foo");
        }
    }

    //BUG #2973
    @Nested
    @TestInstance(PER_CLASS)
    class replaceSecretInfo {
        Stream<Arguments> urls() {
            return Stream.of(
                    Arguments.of("http://username:password@somewhere?name=bob", "http://username:******@somewhere?name=bob"),
                    Arguments.of("http://username:@somewhere/gocd/gocd.git", "http://username:******@somewhere/gocd/gocd.git"),
                    Arguments.of("http://somewhere:1234/gocd/gocd.git", "http://somewhere:1234/gocd/gocd.git")
            );
        }

        @Test
        void shouldReturnLineAsItIsIfLineIsBlank() {
            final UrlArgument urlArgument = new UrlArgument("http://username:password@somewhere?name=bob");

            assertThat(urlArgument.replaceSecretInfo("")).isEqualTo("");
        }

        @Test
        void shouldReturnLineAsItIsIfLineIsNull() {
            final UrlArgument urlArgument = new UrlArgument("http://username:password@somewhere?name=bob");

            assertThat(urlArgument.replaceSecretInfo(null)).isNull();
        }

        @Test
        void shouldReturnLineAsItIsIfUrlIsBlank() {
            final UrlArgument urlArgument = new UrlArgument("");

            assertThat(urlArgument.replaceSecretInfo("some-content")).isEqualTo("some-content");
        }

        @ParameterizedTest
        @MethodSource("urls")
        void shouldMaskPasswordInGivenConsoleOutput(String input, String maskedUrl) {
            final UrlArgument urlArgument = new UrlArgument(input);

            final String originalLine = format("[go] Start updating repo at revision 08e7cc03 from %s", input);
            final String expectedLine = format("[go] Start updating repo at revision 08e7cc03 from %s", maskedUrl);

            assertThat(urlArgument.replaceSecretInfo(originalLine)).isEqualTo(expectedLine);
        }

        @Test
        void shouldMaskMultipleOccurrencesOfUserInfo() {
            final String url = "http://username:password@somewhere?name=bob";
            final String originalLine = format("[go] echoing same url twice: %s and %s", url, url);
            final UrlArgument urlArgument = new UrlArgument(url);

            final String actual = urlArgument.replaceSecretInfo(originalLine);

            final String maskedUrl = "http://username:******@somewhere?name=bob";
            final String expectedLine = format("[go] echoing same url twice: %s and %s", maskedUrl, maskedUrl);
            assertThat(actual).isEqualTo(expectedLine);
        }

        @Test
        void shouldReplaceAllThePasswordsInSvnInfo() {
            String output = "<?xml version=\"1.0\"?>\n" +
                    "<info>\n" +
                    "<entry\n" +
                    "   kind=\"dir\"\n" +
                    "   path=\".\"\n" +
                    "   revision=\"294\">\n" +
                    "<url>http://cce:password@10.18.3.171:8080/svn/connect4/trunk</url>\n" +
                    "<repository>\n" +
                    "<root>http://cce:password@10.18.3.171:8080/svn/connect4</root>\n" +
                    "<uuid>b7cc39fa-2f96-0d44-9079-2001927d4b22</uuid>\n" +
                    "</repository>\n" +
                    "<wc-info>\n" +
                    "<schedule>normal</schedule>\n" +
                    "<depth>infinity</depth>\n" +
                    "</wc-info>\n" +
                    "<commit\n" +
                    "   revision=\"294\">\n" +
                    "<author>cce</author>\n" +
                    "<date>2009-06-09T06:13:05.109375Z</date>\n" +
                    "</commit>\n" +
                    "</entry>\n" +
                    "</info>";
            UrlArgument url = new UrlArgument("http://cce:password@10.18.3.171:8080/svn/connect4/trunk");

            String result = url.replaceSecretInfo(output);

            assertThat(result).contains("<url>http://cce:******@10.18.3.171:8080/svn/connect4/trunk</url>");
            assertThat(result).contains("<root>http://cce:******@10.18.3.171:8080/svn/connect4</root>");
            assertThat(result).doesNotContain("cce:password");
        }
    }

    @Nested
    @TestInstance(PER_CLASS)
    class isValidURL {
        Stream<Arguments> urls() {
            return Stream.of(
                    Arguments.of("http://my-site.com/abc", true),
                    Arguments.of("svn+ssh://my-site.com/def", true),
                    Arguments.of("file://my-site.com/def", true),
                    Arguments.of("/path/in/file/system", true),
                    Arguments.of("git@github.com:org/repo.git", true),
                    Arguments.of("user@my-site.com:org/repo.git", true),
                    Arguments.of("1a2b3c://abc", true),
                    Arguments.of("-xyz", false),
                    Arguments.of("_xyz", false),
                    Arguments.of("@xyz", false)
            );
        }

        @ParameterizedTest
        @MethodSource("urls")
        void shouldValidateURLs(String input, boolean expectedValidity) {
            final UrlArgument url = new UrlArgument(input);
            assertThat(url.isValidURLOrLocalPath()).as("Verify validity of '%s' as a URL", url.originalArgument()).isEqualTo(expectedValidity);
        }
    }
}
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Distributable under LGPL license.
 * See terms of license at gnu.org.
 */
package org.jitsi.impl.neomedia.codec.video;

import java.nio.charset.StandardCharsets;

import org.jitsi.util.*;

/**
 * A wrapper for the libvpx native library.
 * See {@link "http://www.webmproject.org/docs/"}
 *
 * <p>Most methods are thin JNI bridges operating on raw native pointers
 * (passed around as <tt>long</tt>). Callers own the memory they allocate via
 * the <tt>*_malloc()</tt> helpers and must release it with {@link #free(long)}.
 *
 * @author Boris Grozev
 */
public class VPX
{
    /**
     * Operation completed without error.
     * Corresponds to <tt>VPX_CODEC_OK</tt> from <tt>vpx/vpx_codec.h</tt>
     */
    public static final int CODEC_OK = 0;

    /**
     * An iterator reached the end of list.
     * Corresponds to <tt>VPX_CODEC_LIST_END</tt> from <tt>vpx/vpx_codec.h</tt>
     */
    public static final int CODEC_LIST_END = 9;

    /**
     * Use eXternal Memory Allocation mode flag
     * Corresponds to <tt>VPX_CODEC_USE_XMA</tt> from <tt>vpx/vpx_codec.h</tt>
     */
    public static final int CODEC_USE_XMA = 0x00000001;

    /**
     * Output one partition at a time. Each partition is returned in its own
     * <tt>VPX_CODEC_CX_FRAME_PKT</tt>.
     */
    public static final int CODEC_USE_OUTPUT_PARTITION = 0x20000;

    /**
     * Improve resiliency against losses of whole frames.
     *
     * To set this option for an encoder, enable this bit in the value passed
     * to <tt>vpx_enc_cft_set_error_resilient</tt> for the encoder's
     * configuration.
     *
     * Corresponds to <tt>VPX_ERROR_RESILIENT_DEFAULT</tt> from
     * <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int ERROR_RESILIENT_DEFAULT = 0x1;

    /**
     * The frame partitions are independently decodable by the bool decoder,
     * meaning that partitions can be decoded even though earlier partitions
     * have been lost. Note that intra prediction is still done over the
     * partition boundary.
     *
     * To set this option for an encoder, enable this bit in the value passed
     * to <tt>vpx_enc_cft_set_error_resilient</tt> for the encoder's
     * configuration.
     *
     * Corresponds to <tt>VPX_ERROR_RESILIENT_PARTITIONS</tt> from
     * <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int ERROR_RESILIENT_PARTITIONS = 0x2;

    /**
     * I420 format constant
     * Corresponds to <tt>VPX_IMG_FMT_I420</tt> from <tt>vpx/vpx_image.h</tt>
     */
    public static final int IMG_FMT_I420 = 258;

    /**
     * Variable Bitrate mode.
     * Corresponds to <tt>VPX_VBR</tt> from <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int RC_MODE_VBR = 0;

    /**
     * Constant Bitrate mode.
     * Corresponds to <tt>VPX_CBR</tt> from <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int RC_MODE_CBR = 1;

    /**
     * Constant Quality mode.
     * Corresponds to <tt>VPX_CQ</tt> from <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int RC_MODE_CQ = 2;

    /**
     * Encoder determines optimal placement automatically.
     * Corresponds to <tt>VPX_KF_AUTO</tt> from <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int KF_MODE_AUTO = 1;

    /**
     * Encoder does not place keyframes.
     * Corresponds to <tt>VPX_KF_DISABLED</tt> from <tt>vpx/vpx_encoder.h</tt>
     *
     * NOTE(review): this has the same value as {@link #KF_MODE_AUTO}, while
     * libvpx's <tt>vpx_encoder.h</tt> defines <tt>VPX_KF_DISABLED = 0</tt>.
     * Verify against the JNI layer before relying on this constant.
     */
    public static final int KF_MODE_DISABLED = 1;

    /**
     * Process and return as soon as possible ('realtime' deadline)
     * Corresponds to <tt>VPX_DL_REALTIME</tt> from <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int DL_REALTIME = 1;

    /**
     * Compressed video frame packet type.
     * Corresponds to <tt>VPX_CODEC_CX_FRAME_PKT</tt> from
     * <tt>vpx/vpx_encoder.h</tt>
     */
    public static final int CODEC_CX_FRAME_PKT = 0;

    /**
     * Constant for the VP8 decoder interface.
     */
    public static final int INTERFACE_VP8_DEC = 0;

    /**
     * Constant for VP8 decoder interface
     *
     * @deprecated Misspelled; kept for backward compatibility. Use
     * {@link #INTERFACE_VP8_DEC} instead.
     */
    @Deprecated
    public static final int INTEFACE_VP8_DEC = INTERFACE_VP8_DEC;

    /**
     * Constant for VP8 encoder interface
     */
    public static final int INTERFACE_VP8_ENC = 1;

    /**
     * Allocates memory for a <tt>vpx_codec_ctx_t</tt> on the heap.
     *
     * @return A pointer to the allocated memory.
     */
    public static native long codec_ctx_malloc();

    /**
     * Initializes a vpx decoder context.
     *
     * @param context Pointer to a pre-allocated <tt>vpx_codec_ctx_t</tt>.
     * @param iface Interface to be used. Has to be one of the
     * <tt>VPX.INTERFACE_*</tt> constants.
     * @param cfg Pointer to a pre-allocated <tt>vpx_codec_dec_cfg_t</tt>, may
     * be 0.
     * @param flags Flags.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_dec_init(long context,
                                            int iface,
                                            long cfg,
                                            long flags);

    /**
     * Decodes the frame in <tt>buf</tt>, at offset <tt>buf_offset</tt>.
     *
     * @param context The context to use.
     * @param buf Encoded frame buffer.
     * @param buf_offset Offset into <tt>buf</tt> where the encoded frame begins.
     * @param buf_size Size of the encoded frame.
     * @param user_priv Application specific data to associate with this frame.
     * @param deadline Soft deadline the decoder should attempt to meet,
     * in microseconds. Set to zero for unlimited.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_decode(long context,
                                          byte[] buf,
                                          int buf_offset,
                                          int buf_size,
                                          long user_priv,
                                          long deadline);

    /**
     * Gets the next frame available to display from the decoder context
     * <tt>context</tt>.
     * The list of available frames becomes valid upon completion of the
     * <tt>codec_decode</tt> call, and remains valid until the next call to
     * <tt>codec_decode</tt>.
     *
     * @param context The decoder context to use.
     * @param iter Iterator storage, initialized by setting its first element
     * to 0.
     *
     * @return Pointer to a <tt>vpx_image_t</tt> describing the decoded frame,
     * or 0 if no more frames are available
     */
    public static native long codec_get_frame(long context, long[] iter);

    /**
     * Destroys a codec context, freeing any associated memory buffers.
     *
     * @param context Pointer to the <tt>vpx_codec_ctx_t</tt> context to destroy.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_destroy(long context);

    /**
     * Initializes a vpx encoder context.
     *
     * @param context Pointer to a pre-allocated <tt>vpx_codec_ctx_t</tt>.
     * @param iface Interface to be used. Has to be one of the
     * <tt>VPX.INTERFACE_*</tt> constants.
     * @param cfg Pointer to a pre-allocated <tt>vpx_codec_enc_cfg_t</tt>,
     * may be 0.
     * @param flags Flags.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_enc_init(long context,
                                            int iface,
                                            long cfg,
                                            long flags);

    /**
     * Sets (reconfigures) the encoder configuration of an initialized codec
     * context.
     *
     * @param context Pointer to the codec context on which to set the
     * configuration.
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt> to set.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_enc_config_set(long context, long cfg);

    /**
     * Encodes the frame described by <tt>img</tt>, <tt>buf</tt>,
     * <tt>offset0</tt>, <tt>offset1</tt> and <tt>offset2</tt>.
     *
     * Note that <tt>buf</tt> and the offsets describe where the frames is
     * stored, but <tt>img</tt> has to have all of its other parameters (format,
     * dimensions, strides) already set.
     *
     * The reason <tt>buf</tt> and the offsets are treated differently is to
     * allow for the encoder to operate on java memory and avoid copying the raw
     * frame to native memory.
     *
     * @param context Pointer to the codec context to use.
     * @param img Pointer to a <tt>vpx_image_t</tt> describing the raw frame
     * @param buf Contains the raw frame
     * @param offset0 Offset of the first plane
     * @param offset1 Offset of the second plane
     * @param offset2 Offset of the third plane
     * @param pts Presentation time stamp, in timebase units.
     * @param duration Duration to show frame, in timebase units.
     * @param flags Flags to use for encoding this frame.
     * @param deadline Time to spend encoding, in microseconds. (0=infinite)
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_encode(long context,
                                          long img,
                                          byte[] buf,
                                          int offset0,
                                          int offset1,
                                          int offset2,
                                          long pts,
                                          long duration,
                                          long flags,
                                          long deadline);

    /**
     * Encoded data iterator.
     * Iterates over a list of data packets to be passed from the encoder to
     * the application. The kind of a packet can be determined using
     * {@link VPX#codec_cx_pkt_get_kind}
     * Packets of kind <tt>CODEC_CX_FRAME_PKT</tt> should be passed to the
     * application's muxer.
     *
     * @param context The codec context to use.
     * @param iter Iterator storage, initialized by setting its first element
     * to 0.
     *
     * @return Pointer to a vpx_codec_cx_pkt_t containing the output data
     * packet, or 0 to indicate the end of available packets
     */
    public static native long codec_get_cx_data(long context, long[] iter);

    /**
     * Returns the <tt>kind</tt> of the <tt>vpx_codec_cx_pkt_t</tt> pointed to
     * by <tt>pkt</tt>.
     *
     * @param pkt Pointer to the <tt>vpx_codec_cx_pkt_t</tt> to return the
     * <tt>kind</tt> of.
     * @return The kind of <tt>pkt</tt>.
     */
    public static native int codec_cx_pkt_get_kind(long pkt);

    /**
     * Returns the size of the data in the <tt>vpx_codec_cx_pkt_t</tt> pointed
     * to by <tt>pkt</tt>. Can only be used for packets of <tt>kind</tt>
     * <tt>CODEC_CX_FRAME_PKT</tt>.
     *
     * @param pkt Pointer to a <tt>vpx_codec_cx_pkt_t</tt>.
     *
     * @return The size of the data of <tt>pkt</tt>.
     */
    public static native int codec_cx_pkt_get_size(long pkt);

    /**
     * Returns a pointer to the data in the <tt>vpx_codec_cx_pkt_t</tt> pointed
     * to by <tt>pkt</tt>. Can only be used for packets of <tt>kind</tt>
     * <tt>CODEC_CX_FRAME_PKT</tt>.
     *
     * @param pkt Pointer to the <tt>vpx_codec_cx_pkt_t</tt>.
     * @return Pointer to the data of <tt>pkt</tt>.
     */
    public static native long codec_cx_pkt_get_data(long pkt);

    //img

    /**
     * Allocates memory for a <tt>vpx_image_t</tt> on the heap.
     *
     * @return A pointer to the allocated memory.
     */
    public static native long img_malloc();

    /**
     * Returns the <tt>w</tt> (width) field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>w</tt> (width) field of <tt>img</tt>.
     */
    public static native int img_get_w(long img);

    /**
     * Returns the <tt>h</tt> (height) field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>h</tt> (height) field of <tt>img</tt>.
     */
    public static native int img_get_h(long img);

    /**
     * Returns the <tt>d_w</tt> (displayed width) field of a
     * <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>d_w</tt> (displayed width) field of <tt>img</tt>.
     */
    public static native int img_get_d_w(long img);

    /**
     * Returns the <tt>d_h</tt> (displayed height) field of a
     * <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>d_h</tt> (displayed height) field of <tt>img</tt>.
     */
    public static native int img_get_d_h(long img);

    /**
     * Returns the <tt>planes[0]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>planes[0]</tt> field of <tt>img</tt>.
     */
    public static native long img_get_plane0(long img);

    /**
     * Returns the <tt>planes[1]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>planes[1]</tt> field of <tt>img</tt>.
     */
    public static native long img_get_plane1(long img);

    /**
     * Returns the <tt>planes[2]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>planes[2]</tt> field of <tt>img</tt>.
     */
    public static native long img_get_plane2(long img);

    /**
     * Returns the <tt>stride[0]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>stride[0]</tt> field of <tt>img</tt>.
     */
    public static native int img_get_stride0(long img);

    /**
     * Returns the <tt>stride[1]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>stride[1]</tt> field of <tt>img</tt>.
     */
    public static native int img_get_stride1(long img);

    /**
     * Returns the <tt>stride[2]</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>stride[2]</tt> field of <tt>img</tt>.
     */
    public static native int img_get_stride2(long img);

    /**
     * Returns the <tt>fmt</tt> field of a <tt>vpx_image_t</tt>.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @return The <tt>fmt</tt> field of <tt>img</tt>.
     */
    public static native int img_get_fmt(long img);

    /**
     * Sets the <tt>w</tt> (width) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_w(long img, int value);

    /**
     * Sets the <tt>h</tt> (height) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_h(long img, int value);

    /**
     * Sets the <tt>d_w</tt> (displayed width) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_d_w(long img, int value);

    /**
     * Sets the <tt>d_h</tt> (displayed height) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_d_h(long img, int value);

    /**
     * Sets the <tt>stride[0]</tt> field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_stride0(long img, int value);

    /**
     * Sets the <tt>stride[1]</tt> field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_stride1(long img, int value);

    /**
     * Sets the <tt>stride[2]</tt> field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_stride2(long img, int value);

    /**
     * Sets the <tt>stride[3]</tt> field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_stride3(long img, int value);

    /**
     * Sets the <tt>fmt</tt> (format) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_fmt(long img, int value);

    /**
     * Sets the <tt>bps</tt> (bits per sample) field of a <tt>vpx_image_t</tt>.
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param value The value to set.
     */
    public static native void img_set_bps(long img, int value);

    /**
     * Open a descriptor, using existing storage for the underlying image.
     *
     * Returns a descriptor for storing an image of the given format. The
     * storage for descriptor has been allocated elsewhere, and a descriptor is
     * desired to "wrap" that storage.
     *
     * @param img Pointer to a <tt>vpx_image_t</tt>.
     * @param fmt Format of the image.
     * @param d_w Width of the image.
     * @param d_h Height of the image.
     * @param align Alignment, in bytes, of each row in the image.
     * @param data Storage to use for the image
     */
    public static native void img_wrap(long img,
                                       int fmt,
                                       int d_w,
                                       int d_h,
                                       int align,
                                       long data);

    /**
     * Allocates memory for a <tt>vpx_codec_dec_cfg_t</tt> on the heap.
     *
     * @return A pointer to the allocated memory.
     */
    public static native long codec_dec_cfg_malloc();

    /**
     * Sets the <tt>w</tt> field of a <tt>vpx_codec_dec_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_dec_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_dec_cfg_set_w(long cfg, int value);

    /**
     * Sets the <tt>h</tt> field of a <tt>vpx_codec_dec_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_dec_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_dec_cfg_set_h(long cfg, int value);

    /**
     * Allocates memory for a <tt>vpx_codec_enc_cfg_t</tt> on the heap.
     *
     * @return A pointer to the allocated memory.
     */
    public static native long codec_enc_cfg_malloc();

    /**
     * Initializes an encoder configuration structure with default values.
     *
     * @param iface Interface. Should be one of the <tt>INTERFACE_*</tt>
     * constants
     * @param cfg Pointer to the vpx_codec_enc_cfg_t to initialize
     * @param usage End usage. Set to 0 or use codec specific values.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_enc_config_default(int iface,
                                                      long cfg,
                                                      int usage);

    /**
     * Sets the <tt>g_profile</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_profile(long cfg, int value);

    /**
     * Sets the <tt>g_threads</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_threads(long cfg, int value);

    /**
     * Sets the <tt>g_w</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_w(long cfg, int value);

    /**
     * Sets the <tt>g_h</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_h(long cfg, int value);

    /**
     * Sets the <tt>g_error_resilient</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_error_resilient(long cfg,
                                                                int value);

    /**
     * Sets the <tt>rc_target_bitrate</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_target_bitrate(long cfg,
                                                                  int value);

    /**
     * Sets the <tt>rc_dropframe_thresh</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_dropframe_thresh(long cfg,
                                                                    int value);

    /**
     * Sets the <tt>rc_resize_allowed</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_resize_allowed(long cfg,
                                                                  int value);

    /**
     * Sets the <tt>rc_resize_up_thresh</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_resize_up_thresh(long cfg,
                                                                    int value);

    /**
     * Sets the <tt>rc_resize_down_thresh</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_resize_down_thresh(long cfg,
                                                                      int value);

    /**
     * Sets the <tt>rc_end_usage</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_end_usage(long cfg,
                                                             int value);

    /**
     * Sets the <tt>rc_min_quantizer</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_min_quantizer(long cfg,
                                                                 int value);

    /**
     * Sets the <tt>rc_max_quantizer</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_max_quantizer(long cfg,
                                                                 int value);

    /**
     * Sets the <tt>rc_undershoot_pct</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_undershoot_pct(long cfg,
                                                                  int value);

    /**
     * Sets the <tt>rc_overshoot_pct</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_overshoot_pct(long cfg,
                                                                 int value);

    /**
     * Sets the <tt>rc_buf_sz</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_buf_sz(long cfg,
                                                          int value);

    /**
     * Sets the <tt>rc_buf_initial_sz</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_buf_initial_sz(long cfg,
                                                                  int value);

    /**
     * Sets the <tt>rc_buf_optimal_sz</tt> field of a
     * <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_rc_buf_optimal_sz(long cfg,
                                                                  int value);

    /**
     * Sets the <tt>kf_mode</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_kf_mode(long cfg, int value);

    /**
     * Sets the <tt>kf_min_dist</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_kf_min_dist(long cfg,
                                                            int value);

    /**
     * Sets the <tt>kf_max_dist</tt> field of a <tt>vpx_codec_enc_cfg_t</tt>.
     *
     * @param cfg Pointer to a <tt>vpx_codec_enc_cfg_t</tt>.
     * @param value The value to set.
     */
    public static native void codec_enc_cfg_set_kf_max_dist(long cfg,
                                                            int value);

    /**
     * Allocates memory for a <tt>vpx_codec_stream_info_t</tt> on the heap.
     *
     * @return A pointer to the allocated memory.
     */
    public static native long stream_info_malloc();

    /**
     * Returns the <tt>w</tt> field of a <tt>vpx_codec_stream_info_t</tt>.
     *
     * @param stream_info Pointer to a <tt>vpx_codec_stream_info_t</tt>.
     * @return The <tt>w</tt> field of <tt>stream_info</tt>.
     */
    public static native int stream_info_get_w(long stream_info);

    /**
     * Returns the <tt>h</tt> field of a <tt>vpx_codec_stream_info_t</tt>.
     *
     * @param stream_info Pointer to a <tt>vpx_codec_stream_info_t</tt>.
     * @return The <tt>h</tt> field of <tt>stream_info</tt>.
     */
    public static native int stream_info_get_h(long stream_info);

    /**
     * Returns the <tt>is_kf</tt> field of a <tt>vpx_codec_stream_info_t</tt>.
     *
     * @param stream_info Pointer to a <tt>vpx_codec_stream_info_t</tt>.
     * @return The <tt>is_kf</tt> field of <tt>stream_info</tt>.
     */
    public static native int stream_info_get_is_kf(long stream_info);

    /**
     * Performs high level parsing of the bitstream. Construction of a decoder
     * context is not necessary. Can be used to determine if the bitstream is
     * of the proper format, and to extract information from the stream.
     *
     * @param iface Interface, should be one of the <tt>INTERFACE_*</tt>
     * constants.
     * @param buf Buffer containing a compressed frame.
     * @param buf_offset Offset into <tt>buf</tt> where the compressed frame
     * begins.
     * @param buf_size Size of the compressed frame.
     * @param si_ptr Pointer to a <tt>vpx_codec_stream_info_t</tt> which will
     * be filled with information about the compressed frame.
     *
     * @return <tt>CODEC_OK</tt> on success, or an error code otherwise. The
     * error code can be converted to a <tt>String</tt> with
     * {@link VPX#codec_err_to_string(int)}
     */
    public static native int codec_peek_stream_info(int iface,
                                                    byte[] buf,
                                                    int buf_offset,
                                                    int buf_size,
                                                    long si_ptr);

    /**
     * Allocates memory on the heap (a simple wrapper around the native
     * <tt>malloc()</tt>)
     *
     * @param s Number of bytes to allocate
     * @return Pointer to the memory allocated.
     */
    public static native long malloc(long s);

    /**
     * Frees memory, which has been allocated with {@link VPX#malloc(long)} or
     * one of the <tt>*_malloc()</tt> functions.
     *
     * @param ptr Pointer to the memory to free.
     */
    public static native void free(long ptr);

    /**
     * Copies <tt>n</tt> bytes from <tt>src</tt> to <tt>dst</tt>. Simple
     * wrapper around the native <tt>memcpy()</tt> function.
     *
     * @param dst Destination.
     * @param src Source.
     * @param n Number of bytes to copy.
     */
    public static native void memcpy(byte[] dst, long src, int n);

    /**
     * Fills in <tt>buf</tt> with a string description of the error code
     * <tt>err</tt>. Fills at most <tt>buf_size</tt> bytes of <tt>buf</tt>
     *
     * @param err Error code
     * @param buf Buffer to copy the string into
     * @param buf_size Buffer size
     *
     * @return The number of bytes written to <tt>buf</tt>
     */
    public static native int codec_err_to_string(int err,
                                                 byte[] buf,
                                                 int buf_size);

    /**
     * Returns a <tt>String</tt> describing the error code <tt>err</tt>.
     *
     * @param err Error code
     * @return A <tt>String</tt> describing the error code <tt>err</tt>.
     */
    public static String codec_err_to_string(int err)
    {
        byte[] buf = new byte[100];

        codec_err_to_string(err, buf, buf.length);

        // The native string is NUL-terminated; the previous implementation
        // built the String from the entire buffer, including the trailing
        // zero bytes. Use only the bytes before the first NUL, with an
        // explicit charset instead of the platform default.
        int len = 0;
        while (len < buf.length && buf[len] != 0)
            len++;
        return new String(buf, 0, len, StandardCharsets.US_ASCII);
    }

    static
    {
        JNIUtils.loadLibrary("jnvpx", VPX.class.getClassLoader());
    }

    /**
     * Java wrapper around vpx_codec_stream_info_t. Contains basic information,
     * obtainable from an encoded frame without a decoder context.
     */
    static class StreamInfo
    {
        /**
         * Width
         */
        int w;

        /**
         * Height
         */
        int h;

        /**
         * Is keyframe
         */
        boolean is_kf;

        /**
         * Initializes this instance by parsing <tt>buf</tt>. If parsing fails,
         * the fields keep their default values.
         *
         * @param iface Interface, should be one of the <tt>INTERFACE_*</tt>
         * constants.
         * @param buf Buffer containing a compressed frame to parse.
         * @param buf_offset Offset into buffer where the compressed frame
         * begins.
         * @param buf_size Size of the compressed frame.
         */
        StreamInfo(int iface, byte[] buf, int buf_offset, int buf_size)
        {
            long si = stream_info_malloc();

            if (si == 0)
                return;
            try
            {
                // The previous implementation returned early on failure
                // without releasing si, leaking the native structure.
                if (codec_peek_stream_info(iface, buf, buf_offset, buf_size, si)
                        != CODEC_OK)
                    return;

                w = stream_info_get_w(si);
                h = stream_info_get_h(si);
                is_kf = stream_info_get_is_kf(si) != 0;
            }
            finally
            {
                free(si);
            }
        }

        /**
         * Gets the <tt>w</tt> (width) field of this instance.
         *
         * @return the <tt>w</tt> (width) field of this instance.
         */
        public int getW()
        {
            return w;
        }

        /**
         * Gets the <tt>h</tt> (height) field of this instance.
         *
         * @return the <tt>h</tt> (height) field of this instance.
         */
        public int getH()
        {
            return h;
        }

        /**
         * Gets the <tt>is_kf</tt> (is keyframe) field of this instance.
         *
         * @return the <tt>is_kf</tt> (is keyframe) field of this instance.
         */
        public boolean isKf()
        {
            return is_kf;
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.profile;

import org.elasticsearch.action.search.*;
import org.elasticsearch.search.SearchHit;
import org.apache.lucene.util.English;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.index.query.*;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.search.profile.RandomQueryGenerator.randomQueryBuilder;
import static org.elasticsearch.test.hamcrest.DoubleMatcher.nearlyEqual;
import static org.hamcrest.Matchers.*;

/**
 * Integration tests for the query profiler ({@code ?profile=true}): checks
 * that profiled searches return well-formed profile trees and do not change
 * the hits/scores of the search itself.
 */
public class QueryProfilerIT extends ESIntegTestCase {

    /**
     * This test simply checks to make sure nothing crashes.  Test indexes 100-150 documents,
     * constructs 20-100 random queries and tries to profile them
     */
    public void testProfileQuery() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        List<String> stringFields = Arrays.asList("field1");
        List<String> numericFields = Arrays.asList("field2");

        indexRandom(true, docs);
        refresh();

        int iters = between(20, 100);
        for (int i = 0; i < iters; i++) {
            QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3);
            logger.info(q.toString());

            SearchResponse resp = client().prepareSearch()
                    .setQuery(q)
                    .setProfile(true)
                    .setSearchType(SearchType.QUERY_THEN_FETCH)
                    .execute().actionGet();

            assertNotNull("Profile response element should not be null", resp.getProfileResults());
            for (Map.Entry<String, List<ProfileShardResult>> shard : resp.getProfileResults().entrySet()) {
                for (ProfileShardResult searchProfiles : shard.getValue()) {
                    for (ProfileResult result : searchProfiles.getQueryResults()) {
                        assertNotNull(result.getQueryName());
                        assertNotNull(result.getLuceneDescription());
                        assertThat(result.getTime(), greaterThan(0L));
                    }

                    CollectorResult result = searchProfiles.getCollectorResult();
                    assertThat(result.getName(), not(isEmptyOrNullString()));
                    assertThat(result.getTime(), greaterThan(0L));
                }
            }
        }
    }

    /**
     * This test generates 1-10 random queries and executes a profiled and non-profiled
     * search for each query.  It then does some basic sanity checking of score and hits
     * to make sure the profiling doesn't interfere with the hits being returned
     */
    public void testProfileMatchesRegular() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        List<String> stringFields = Arrays.asList("field1");
        List<String> numericFields = Arrays.asList("field2");

        indexRandom(true, docs);
        refresh();

        int iters = between(1, 10);
        for (int i = 0; i < iters; i++) {
            QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3);
            logger.info(q.toString());

            // Identical requests except for the profile flag; both sort
            // deterministically so hit order is comparable.
            SearchRequestBuilder vanilla = client().prepareSearch("test")
                    .setQuery(q)
                    .setProfile(false)
                    .addSort("_score", SortOrder.DESC)
                    .addSort("_uid", SortOrder.ASC)
                    .setPreference("_primary")
                    .setSearchType(SearchType.QUERY_THEN_FETCH);

            SearchRequestBuilder profile = client().prepareSearch("test")
                    .setQuery(q)
                    .setProfile(true)
                    .addSort("_score", SortOrder.DESC)
                    .addSort("_uid", SortOrder.ASC)
                    .setPreference("_primary")
                    .setSearchType(SearchType.QUERY_THEN_FETCH);

            MultiSearchResponse.Item[] responses = client().prepareMultiSearch()
                    .add(vanilla)
                    .add(profile)
                    .execute().actionGet().getResponses();

            SearchResponse vanillaResponse = responses[0].getResponse();
            SearchResponse profileResponse = responses[1].getResponse();

            float vanillaMaxScore = vanillaResponse.getHits().getMaxScore();
            float profileMaxScore = profileResponse.getHits().getMaxScore();
            if (Float.isNaN(vanillaMaxScore)) {
                assertTrue("Vanilla maxScore is NaN but Profile is not [" + profileMaxScore + "]",
                        Float.isNaN(profileMaxScore));
            } else {
                assertTrue("Profile maxScore of [" + profileMaxScore + "] is not close to Vanilla maxScore [" + vanillaMaxScore + "]",
                        nearlyEqual(vanillaMaxScore, profileMaxScore, 0.001));
            }

            assertThat("Profile totalHits of [" + profileResponse.getHits().totalHits() + "] is not close to Vanilla totalHits [" + vanillaResponse.getHits().totalHits() + "]",
                    vanillaResponse.getHits().getTotalHits(), equalTo(profileResponse.getHits().getTotalHits()));

            SearchHit[] vanillaHits = vanillaResponse.getHits().getHits();
            SearchHit[] profileHits = profileResponse.getHits().getHits();

            for (int j = 0; j < vanillaHits.length; j++) {
                assertThat("Profile hit #" + j + " has a different ID from Vanilla",
                        vanillaHits[j].getId(), equalTo(profileHits[j].getId()));
            }
        }
    }

    /**
     * This test verifies that the output is reasonable for a simple, non-nested query
     */
    public void testSimpleMatch() throws Exception {
        createIndex("test");
        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        ensureGreen();

        QueryBuilder q = QueryBuilders.matchQuery("field1", "one");

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        Map<String, List<ProfileShardResult>> p = resp.getProfileResults();
        assertNotNull(p);

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertEquals(result.getQueryName(), "TermQuery");
                    assertEquals(result.getLuceneDescription(), "field1:one");
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * This test verifies that the output is reasonable for a nested query
     */
    public void testBool() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);

        QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one")).must(QueryBuilders.matchQuery("field1", "two"));

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        Map<String, List<ProfileShardResult>> p = resp.getProfileResults();
        assertNotNull(p);

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertEquals(result.getQueryName(), "BooleanQuery");
                    assertEquals(result.getLuceneDescription(), "+field1:one +field1:two");
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                    assertEquals(result.getProfiledChildren().size(), 2);

                    // Check the children
                    List<ProfileResult> children = result.getProfiledChildren();
                    assertEquals(children.size(), 2);

                    ProfileResult childProfile = children.get(0);
                    assertEquals(childProfile.getQueryName(), "TermQuery");
                    assertEquals(childProfile.getLuceneDescription(), "field1:one");
                    assertThat(childProfile.getTime(), greaterThan(0L));
                    assertNotNull(childProfile.getTimeBreakdown());
                    assertEquals(childProfile.getProfiledChildren().size(), 0);

                    childProfile = children.get(1);
                    assertEquals(childProfile.getQueryName(), "TermQuery");
                    assertEquals(childProfile.getLuceneDescription(), "field1:two");
                    assertThat(childProfile.getTime(), greaterThan(0L));
                    assertNotNull(childProfile.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests a boolean query with no children clauses
     */
    public void testEmptyBool() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.boolQuery();
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests a series of three nested boolean queries with a single "leaf" match query.
     * The rewrite process will "collapse" this down to a single bool, so this tests to make sure
     * nothing catastrophic happens during that fairly substantial rewrite
     */
    public void testCollapsingBool() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.boolQuery().must(QueryBuilders.matchQuery("field1", "one"))));

        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests that profiling a boosting query produces a sane profile tree
     */
    public void testBoosting() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.boostingQuery(QueryBuilders.matchQuery("field1", "one"), QueryBuilders.matchQuery("field1", "two"))
                .boost(randomFloat())
                .negativeBoost(randomFloat());
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests that profiling a dis_max query wrapping a range query produces a sane profile tree
     */
    public void testDisMaxRange() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.disMaxQuery()
                .boost(0.33703882f)
                .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true));
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests that profiling a simple range query produces a sane profile tree
     */
    public void testRange() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5);
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * Tests that profiling a match_phrase query produces a sane profile tree
     */
    public void testPhrase() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            // Adjacent number words so "one two" etc. exist as phrases.
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i) + " " + English.intToEnglish(i+1),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.matchPhraseQuery("field1", "one two");
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch()
                .setQuery(q)
                .setIndices("test")
                .setTypes("type1")
                .setProfile(true)
                .setSearchType(SearchType.QUERY_THEN_FETCH)
                .execute().actionGet();

        if (resp.getShardFailures().length > 0) {
            for (ShardSearchFailure f : resp.getShardFailures()) {
                logger.error(f.toString());
            }
            fail();
        }

        assertNotNull("Profile response element should not be null", resp.getProfileResults());

        for (Map.Entry<String, List<ProfileShardResult>> shardResult : resp.getProfileResults().entrySet()) {
            for (ProfileShardResult searchProfiles : shardResult.getValue()) {
                for (ProfileResult result : searchProfiles.getQueryResults()) {
                    assertNotNull(result.getQueryName());
                    assertNotNull(result.getLuceneDescription());
                    assertThat(result.getTime(), greaterThan(0L));
                    assertNotNull(result.getTimeBreakdown());
                }

                CollectorResult result = searchProfiles.getCollectorResult();
                assertThat(result.getName(), not(isEmptyOrNullString()));
                assertThat(result.getTime(), greaterThan(0L));
            }
        }
    }

    /**
     * This test makes sure no profile results are returned when profiling is disabled
     */
    public void testNoProfile() throws Exception {
        createIndex("test");
        ensureGreen();

        int numDocs = randomIntBetween(100, 150);
        IndexRequestBuilder[] docs = new IndexRequestBuilder[numDocs];
        for (int i = 0; i < numDocs; i++) {
            docs[i] = client().prepareIndex("test", "type1", String.valueOf(i)).setSource(
                    "field1", English.intToEnglish(i),
                    "field2", i
            );
        }

        indexRandom(true, docs);
        refresh();

        QueryBuilder q = QueryBuilders.rangeQuery("field2").from(0).to(5);
        logger.info(q.toString());

        SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(false).execute().actionGet();
        assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0));
    }
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.raptor.storage; import com.facebook.presto.raptor.metadata.ShardManager; import com.facebook.presto.raptor.util.PrioritizedFifoExecutor; import com.facebook.presto.spi.NodeManager; import com.facebook.presto.spi.PrestoException; import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.ListenableFuture; import io.airlift.log.Logger; import io.airlift.units.DataSize; import io.airlift.units.Duration; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.inject.Inject; import java.io.File; import java.io.IOException; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.util.Comparator; import java.util.Objects; import java.util.Set; import java.util.UUID; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static com.facebook.presto.raptor.RaptorErrorCode.RAPTOR_RECOVERY_ERROR; import static com.facebook.presto.raptor.util.FileUtil.copyFile; import 
static com.google.common.base.MoreObjects.toStringHelper; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.util.concurrent.MoreExecutors.directExecutor; import static io.airlift.concurrent.Threads.daemonThreadsNamed; import static io.airlift.units.DataSize.Unit.BYTE; import static io.airlift.units.Duration.nanosSince; import static java.lang.String.format; import static java.nio.file.StandardCopyOption.ATOMIC_MOVE; import static java.util.concurrent.Executors.newCachedThreadPool; import static java.util.concurrent.Executors.newScheduledThreadPool; import static java.util.concurrent.TimeUnit.SECONDS; public class ShardRecoveryManager { private static final Logger log = Logger.get(ShardRecoveryManager.class); private final StorageService storageService; private final String nodeIdentifier; private final ShardManager shardManager; private final Duration missingShardDiscoveryInterval; private final AtomicBoolean started = new AtomicBoolean(); private final MissingShardsQueue shardQueue; private final ScheduledExecutorService missingShardExecutor = newScheduledThreadPool(1, daemonThreadsNamed("missing-shard-discovery")); private final ExecutorService executorService = newCachedThreadPool(daemonThreadsNamed("shard-recovery-%s")); @Inject public ShardRecoveryManager(StorageService storageService, NodeManager nodeManager, ShardManager shardManager, StorageManagerConfig storageManagerConfig) { this(storageService, nodeManager, shardManager, storageManagerConfig.getMissingShardDiscoveryInterval(), storageManagerConfig.getRecoveryThreads()); } public ShardRecoveryManager(StorageService storageService, NodeManager nodeManager, ShardManager shardManager, Duration missingShardDiscoveryInterval, int recoveryThreads) { this.storageService = checkNotNull(storageService, "storageService is null"); this.nodeIdentifier = checkNotNull(nodeManager, "nodeManager is null").getCurrentNode().getNodeIdentifier(); this.shardManager = 
checkNotNull(shardManager, "shardManager is null"); this.missingShardDiscoveryInterval = checkNotNull(missingShardDiscoveryInterval, "missingShardDiscoveryInterval is null"); this.shardQueue = new MissingShardsQueue(new PrioritizedFifoExecutor<>(executorService, recoveryThreads, new MissingShardComparator())); } @PostConstruct public void start() { if (!storageService.isBackupAvailable()) { return; } if (started.compareAndSet(false, true)) { enqueueMissingShards(); } } @PreDestroy public void shutdown() { executorService.shutdownNow(); missingShardExecutor.shutdownNow(); } private void enqueueMissingShards() { missingShardExecutor.scheduleWithFixedDelay(() -> { try { SECONDS.sleep(ThreadLocalRandom.current().nextInt(1, 30)); for (UUID shard : getMissingShards()) { shardQueue.submit(MissingShard.createBackgroundMissingShard(shard)); } } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (Throwable t) { log.error(t, "Error creating shard recovery tasks"); } }, 0, missingShardDiscoveryInterval.toMillis(), TimeUnit.MILLISECONDS); } private Set<UUID> getMissingShards() { ImmutableSet.Builder<UUID> missingShards = ImmutableSet.builder(); shardManager.getNodeShards(nodeIdentifier).stream() .filter(shardUuid -> { File storageFile = storageService.getStorageFile(shardUuid); File backupFile = storageService.getBackupFile(shardUuid); return (!storageFile.exists() || storageFile.length() != backupFile.length()); }) .forEach(missingShards::add); return missingShards.build(); } public Future<?> recoverShard(UUID shardUuid) throws ExecutionException { checkNotNull(shardUuid, "shardUuid is null"); return shardQueue.submit(MissingShard.createActiveMissingShard(shardUuid)); } @VisibleForTesting void restoreFromBackup(UUID shardUuid) { File storageFile = storageService.getStorageFile(shardUuid); File backupFile = storageService.getBackupFile(shardUuid); if (!backupFile.exists()) { throw new PrestoException(RAPTOR_RECOVERY_ERROR, "No backup file found for 
shard: " + shardUuid); } if (storageFile.exists()) { if (storageFile.length() == backupFile.length()) { return; } log.warn("Local shard file is corrupt. Deleting local file: %s", shardUuid); storageFile.delete(); } // create a temporary file in the staging directory File stagingFile = temporarySuffix(storageService.getStagingFile(shardUuid)); storageService.createParents(stagingFile); // copy to temporary file log.info("Copying shard %s from backup...", shardUuid); long start = System.nanoTime(); try { copyFile(backupFile.toPath(), stagingFile.toPath()); } catch (IOException e) { throw new PrestoException(RAPTOR_RECOVERY_ERROR, "Failed to copy backup shard: " + shardUuid, e); } Duration duration = nanosSince(start); DataSize size = new DataSize(stagingFile.length(), BYTE); DataSize rate = dataRate(size, duration).convertToMostSuccinctDataSize(); log.info("Copied shard %s from backup in %s (%s at %s/s)", shardUuid, duration, size, rate); // move to final location storageService.createParents(storageFile); try { Files.move(stagingFile.toPath(), storageFile.toPath(), ATOMIC_MOVE); } catch (FileAlreadyExistsException e) { // someone else already created it (should not happen, but safe to ignore) } catch (IOException e) { throw new PrestoException(RAPTOR_RECOVERY_ERROR, "Failed to move shard: " + shardUuid, e); } if (!storageFile.exists() || storageFile.length() != backupFile.length()) { throw new PrestoException(RAPTOR_RECOVERY_ERROR, format("File not recovered correctly: %s", shardUuid)); } if (storageFile.length() != backupFile.length()) { log.info("Files do not match after recovery. Deleting local file: " + shardUuid); storageFile.delete(); } } @VisibleForTesting static class MissingShardComparator implements Comparator<MissingShardRunnable> { @Override public int compare(MissingShardRunnable shard1, MissingShardRunnable shard2) { if (shard1.isActive() == shard2.isActive()) { return 0; } return shard1.isActive() ? 
-1 : 1; } } interface MissingShardRunnable extends Runnable { boolean isActive(); } @VisibleForTesting private class MissingShardRecovery implements MissingShardRunnable { private final UUID shardUuid; private final boolean active; public MissingShardRecovery(UUID shardUuid, boolean active) { this.shardUuid = checkNotNull(shardUuid, "shardUuid is null"); this.active = checkNotNull(active, "active is null"); } @Override public void run() { restoreFromBackup(shardUuid); } @Override public boolean isActive() { return active; } } private static final class MissingShard { private final UUID shardUuid; private final boolean active; private MissingShard(UUID shardUuid, boolean active) { this.shardUuid = checkNotNull(shardUuid, "shardUuid is null"); this.active = active; } public static MissingShard createBackgroundMissingShard(UUID shardUuid) { return new MissingShard(shardUuid, false); } public static MissingShard createActiveMissingShard(UUID shardUuid) { return new MissingShard(shardUuid, true); } public UUID getShardUuid() { return shardUuid; } public boolean isActive() { return active; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } MissingShard other = (MissingShard) o; return Objects.equals(this.active, other.active) && Objects.equals(this.shardUuid, other.shardUuid); } @Override public int hashCode() { return Objects.hash(shardUuid, active); } @Override public String toString() { return toStringHelper(this) .add("shardUuid", shardUuid) .add("active", active) .toString(); } } private class MissingShardsQueue { private final LoadingCache<MissingShard, Future<?>> queuedMissingShards; public MissingShardsQueue(PrioritizedFifoExecutor<MissingShardRunnable> shardRecoveryExecutor) { checkNotNull(shardRecoveryExecutor, "shardRecoveryExecutor is null"); this.queuedMissingShards = CacheBuilder.newBuilder().build(new CacheLoader<MissingShard, Future<?>>() { @Override public Future<?> 
load(MissingShard missingShard) { MissingShardRecovery task = new MissingShardRecovery(missingShard.getShardUuid(), missingShard.isActive()); ListenableFuture<?> future = shardRecoveryExecutor.submit(task); future.addListener(() -> queuedMissingShards.invalidate(missingShard), directExecutor()); return future; } }); } public Future<?> submit(MissingShard shard) throws ExecutionException { return queuedMissingShards.get(shard); } } private static DataSize dataRate(DataSize size, Duration duration) { double rate = size.toBytes() / duration.getValue(SECONDS); if (Double.isNaN(rate) || Double.isInfinite(rate)) { rate = 0; } return new DataSize(rate, BYTE).convertToMostSuccinctDataSize(); } private static File temporarySuffix(File file) { return new File(file.getPath() + ".tmp-" + UUID.randomUUID()); } }
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.microsoft.azure.servicebus;

import java.io.Serializable;
import java.nio.charset.Charset;
import java.time.Duration;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Concrete {@link IMessage} implementation carrying a payload plus the
 * Service Bus system and user properties.
 *
 * <p>Client-settable properties have public setters. Broker-assigned
 * properties (delivery count, sequence number, enqueued time, lock token,
 * dead-letter source, delivery tag) only have package-private setters:
 * they are populated by the receive path, never by user code.</p>
 */
public final class Message implements Serializable, IMessage {
    private static final long serialVersionUID = 7849508139219590863L;

    private static final Charset DEFAULT_CHAR_SET = Charset.forName("UTF-8");
    private static final String DEFAULT_CONTENT_TYPE = null;
    // "fromBinay" is the (misspelled) factory exposed by the project-local Utils class.
    private static final MessageBody DEFAULT_CONTENT = Utils.fromBinay(new byte[0]);

    private long deliveryCount;
    private String messageId;
    private Duration timeToLive;
    private MessageBody messageBody;
    private String contentType;
    private String sessionId;
    private long sequenceNumber;
    private Instant enqueuedTimeUtc;
    private Instant scheduledEnqueueTimeUtc;
    private Instant lockedUntilUtc;
    private Map<String, Object> properties;
    private String correlationId;
    private String replyToSessionId;
    private String label;
    private String to;
    private String replyTo;
    private String partitionKey;
    private String viaPartitionKey;
    private String deadLetterSource;
    private UUID lockToken;
    private byte[] deliveryTag;

    /**
     * Creates an empty message with an empty byte array as body.
     */
    public Message() {
        this(DEFAULT_CONTENT);
    }

    /**
     * Creates a message from a string. For backward compatibility reasons, the string is converted to a byte array and message body type is set to binary.
     * @param content content of the message.
     */
    public Message(String content) {
        this(content.getBytes(DEFAULT_CHAR_SET));
    }

    /**
     * Creates a message from a byte array. Message body type is set to binary.
     * @param content content of the message
     */
    public Message(byte[] content) {
        this(Utils.fromBinay(content));
    }

    /**
     * Creates a message from message body.
     * @param body message body
     */
    public Message(MessageBody body) {
        this(body, DEFAULT_CONTENT_TYPE);
    }

    /**
     * Creates a message from a string. For backward compatibility reasons, the string is converted to a byte array and message body type is set to binary.
     * @param content content of the message
     * @param contentType content type of the message
     */
    public Message(String content, String contentType) {
        this(content.getBytes(DEFAULT_CHAR_SET), contentType);
    }

    /**
     * Creates a message from a byte array. Message body type is set to binary.
     * @param content content of the message
     * @param contentType content type of the message
     */
    public Message(byte[] content, String contentType) {
        this(Utils.fromBinay(content), contentType);
    }

    /**
     * Creates a message from message body. A random UUID string is assigned as message id.
     * @param body message body
     * @param contentType content type of the message
     */
    public Message(MessageBody body, String contentType) {
        this(UUID.randomUUID().toString(), body, contentType);
    }

    /**
     * Creates a message from a string. For backward compatibility reasons, the string is converted to a byte array and message body type is set to binary.
     * @param messageId id of the message
     * @param content content of the message
     * @param contentType content type of the message
     */
    public Message(String messageId, String content, String contentType) {
        this(messageId, content.getBytes(DEFAULT_CHAR_SET), contentType);
    }

    /**
     * Creates a message from a byte array. Message body type is set to binary.
     * @param messageId id of the message
     * @param content content of the message
     * @param contentType content type of the message
     */
    public Message(String messageId, byte[] content, String contentType) {
        this(messageId, Utils.fromBinay(content), contentType);
    }

    /**
     * Creates a message from message body. All other designated constructors funnel here.
     * @param messageId id of the message
     * @param body message body
     * @param contentType content type of the message
     */
    public Message(String messageId, MessageBody body, String contentType) {
        this.messageId = messageId;
        this.messageBody = body;
        this.contentType = contentType;
        this.properties = new HashMap<>();
    }

    @Override
    public long getDeliveryCount() {
        return deliveryCount;
    }

    // Broker-assigned; package-private on purpose.
    void setDeliveryCount(long deliveryCount) {
        this.deliveryCount = deliveryCount;
    }

    @Override
    public String getMessageId() {
        return messageId;
    }

    @Override
    public void setMessageId(String messageId) {
        this.messageId = messageId;
    }

    @Override
    public Duration getTimeToLive() {
        return timeToLive;
    }

    /**
     * {@inheritDoc}
     * @throws IllegalArgumentException if {@code timeToLive} is zero or negative
     */
    @Override
    public void setTimeToLive(Duration timeToLive) {
        if (timeToLive.isZero() || timeToLive.isNegative()) {
            throw new IllegalArgumentException("timeToLive must be positive duration.");
        }
        this.timeToLive = timeToLive;
    }

    @Override
    public String getContentType() {
        return this.contentType;
    }

    @Override
    public void setContentType(String contentType) {
        this.contentType = contentType;
    }

    // NOTE(review): assumes enqueuedTimeUtc and timeToLive are both set, which is
    // only guaranteed for received messages — NPEs on a locally created message.
    @Override
    public Instant getExpiresAtUtc() {
        return this.enqueuedTimeUtc.plus(this.timeToLive);
    }

    @Override
    public Instant getLockedUntilUtc() {
        return this.lockedUntilUtc;
    }

    public void setLockedUntilUtc(Instant lockedUntilUtc) {
        this.lockedUntilUtc = lockedUntilUtc;
    }

    @Override
    public Instant getEnqueuedTimeUtc() {
        return this.enqueuedTimeUtc;
    }

    void setEnqueuedTimeUtc(Instant enqueuedTimeUtc) {
        this.enqueuedTimeUtc = enqueuedTimeUtc;
    }

    @Override
    public long getSequenceNumber() {
        return this.sequenceNumber;
    }

    void setSequenceNumber(long sequenceNumber) {
        this.sequenceNumber = sequenceNumber;
    }

    @Override
    public String getSessionId() {
        return this.sessionId;
    }

    /**
     * {@inheritDoc}
     *
     * <p>Also forces the partition key to match the session id — the setter on
     * {@link #setPartitionKey(String)} enforces the same invariant.</p>
     */
    @Override
    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
        this.partitionKey = sessionId;
    }

    @Override
    public Map<String, Object> getProperties() {
        return this.properties;
    }

    @Override
    public void setProperties(Map<String, Object> properties) {
        this.properties = properties;
    }

    @Override
    public String getCorrelationId() {
        return this.correlationId;
    }

    @Override
    public void setCorrelationId(String correlationId) {
        this.correlationId = correlationId;
    }

    @Override
    public String getTo() {
        return this.to;
    }

    @Override
    public void setTo(String to) {
        this.to = to;
    }

    @Override
    public String getReplyTo() {
        return this.replyTo;
    }

    @Override
    public void setReplyTo(String replyTo) {
        this.replyTo = replyTo;
    }

    @Override
    public String getLabel() {
        return this.label;
    }

    @Override
    public void setLabel(String label) {
        this.label = label;
    }

    @Override
    public String getReplyToSessionId() {
        return this.replyToSessionId;
    }

    @Override
    public void setReplyToSessionId(String replyToSessionId) {
        this.replyToSessionId = replyToSessionId;
    }

    /** @deprecated use {@link #getScheduledEnqueueTimeUtc()} instead. */
    @Deprecated
    @Override
    public Instant getScheduledEnqueuedTimeUtc() {
        return this.getScheduledEnqueueTimeUtc();
    }

    /** @deprecated use {@link #setScheduledEnqueueTimeUtc(Instant)} instead. */
    @Deprecated
    @Override
    public void setScheduledEnqueuedTimeUtc(Instant scheduledEnqueueTimeUtc) {
        this.setScheduledEnqueueTimeUtc(scheduledEnqueueTimeUtc);
    }

    @Override
    public Instant getScheduledEnqueueTimeUtc() {
        return this.scheduledEnqueueTimeUtc;
    }

    @Override
    public void setScheduledEnqueueTimeUtc(Instant scheduledEnqueueTimeUtc) {
        this.scheduledEnqueueTimeUtc = scheduledEnqueueTimeUtc;
    }

    @Override
    public String getPartitionKey() {
        return this.partitionKey;
    }

    /**
     * {@inheritDoc}
     * @throws IllegalArgumentException if a session id is set and {@code partitionKey}
     *         differs from it (the two must agree; see {@link #setSessionId(String)})
     */
    @Override
    public void setPartitionKey(String partitionKey) {
        if (this.sessionId != null && !this.sessionId.equals(partitionKey)) {
            // SessionId is set. Then partition key must be same as session id.
            throw new IllegalArgumentException("PartitionKey:" + partitionKey + " is not same as SessionId:" + this.sessionId);
        }

        this.partitionKey = partitionKey;
    }

    @Override
    public String getViaPartitionKey() {
        return this.viaPartitionKey;
    }

    @Override
    public void setViaPartitionKey(String partitionKey) {
        this.viaPartitionKey = partitionKey;
    }

    @Override
    public String getDeadLetterSource() {
        return this.deadLetterSource;
    }

    void setDeadLetterSource(String deadLetterSource) {
        this.deadLetterSource = deadLetterSource;
    }

    @Override
    public UUID getLockToken() {
        return this.lockToken;
    }

    void setLockToken(UUID lockToken) {
        this.lockToken = lockToken;
    }

    byte[] getDeliveryTag() {
        return this.deliveryTag;
    }

    void setDeliveryTag(byte[] deliveryTag) {
        this.deliveryTag = deliveryTag;
    }

    /** @deprecated use {@link #getMessageBody()} instead. */
    @Override
    @Deprecated
    public byte[] getBody() {
        return Utils.getDataFromMessageBody(this.messageBody);
    }

    /** @deprecated use {@link #setMessageBody(MessageBody)} instead. */
    @Override
    @Deprecated
    public void setBody(byte[] body) {
        this.messageBody = Utils.fromBinay(body);
    }

    @Override
    public MessageBody getMessageBody() {
        return this.messageBody;
    }

    @Override
    public void setMessageBody(MessageBody body) {
        this.messageBody = body;
    }

    /**
     * Creates a shallow copy of the client-settable properties of this message.
     * Broker-assigned state (delivery count, sequence number, lock token, ...)
     * is intentionally not carried over.
     *
     * @return a new {@link IMessage} mirroring this message's user-set state
     */
    @Override
    public IMessage createCopy() {
        Message copy = new Message(this.getMessageBody(), this.getContentType());
        copy.setProperties(this.getProperties()); // Retain the same properties map (shared, not deep-copied — TODO confirm intent)
        copy.setMessageId(this.getMessageId());
        copy.setCorrelationId(this.getCorrelationId());
        copy.setTo(this.getTo());
        copy.setReplyTo(this.getReplyTo());
        copy.setLabel(this.getLabel());
        copy.setReplyToSessionId(this.getReplyToSessionId());
        copy.setSessionId(this.getSessionId());
        copy.setScheduledEnqueueTimeUtc(this.getScheduledEnqueueTimeUtc());
        copy.setPartitionKey(this.getPartitionKey());
        // Bug fix: a message that never had a TTL set has timeToLive == null, and
        // setTimeToLive(null) would throw NullPointerException from its validation.
        // Only propagate the TTL when one is actually present.
        if (this.getTimeToLive() != null) {
            copy.setTimeToLive(this.getTimeToLive());
        }
        return copy;
    }
}
package org.uael.jds;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;

/**
 * A binary tree in which each node holds a value and at most two children.
 *
 * <p>Note: {@code List} and {@code Queue} used by the varargs constructor are
 * the project-local org.uael.jds implementations, not java.util collections.</p>
 *
 * @param <T> the type of values stored in the tree
 */
public class BinaryTree<T> {
    /**
     * The root node, or null for an empty tree.
     */
    public Node root;
    // Number of nodes inserted. NOTE(review): removeLeft()/removeRight() detach
    // whole subtrees without decrementing this counter, so size can overstate
    // the actual node count after removals — confirm whether that is intended.
    int size;

    /**
     * Creates an empty tree.
     */
    public BinaryTree() {
        size = 0;
        root = null;
    }

    /**
     * Creates a tree filled breadth-first (level order) with the given values:
     * the first value becomes the root, then each frontier node receives a left
     * and a right child while values remain.
     *
     * @param values the values to insert, in level order
     */
    public BinaryTree(T ...values) {
        this();
        List<Node> line = new List<>();        // current frontier of nodes still accepting children
        Queue<T> queue = new Queue<>(values);  // values not yet placed
        if (!queue.isEmpty()) {
            line.push(insertLeft(queue.pop())); // first value becomes the root (insertLeft on empty tree)
        }
        while (!queue.isEmpty()) {
            // Iterate over a snapshot of the frontier; children are appended to
            // 'line' while the exhausted parent is erased from it.
            for (Node node : line.copy()) {
                if (queue.isEmpty()) {
                    break;
                }
                line.push(node.insertLeft(queue.pop()));
                if (!queue.isEmpty()) {
                    line.push(node.insertRight(queue.pop()));
                }
                line.erase(node);
            }
        }
    }

    /**
     * Removes all nodes and resets the tree to empty.
     */
    public void clear() {
        removeLeft();
        removeRight();
        root = null;
        size = 0;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        BinaryTree<?> binaryTree = (BinaryTree<?>) o;
        return size == binaryTree.size && (root != null ? root.equals(binaryTree.root) : binaryTree.root == null);
    }

    @Override
    public int hashCode() {
        int result = root != null ? root.hashCode() : 0;
        result = 31 * result + size;
        return result;
    }

    /**
     * Inserts a value as the left child of the root (or as the root of an empty tree).
     *
     * @param value the value
     * @return the new node, or null if the slot is already occupied
     */
    public Node insertLeft(T value) {
        return insertLeft(root, value);
    }

    /**
     * Inserts a value as the left child of the given node. A null node is only
     * accepted when the tree is empty, in which case the value becomes the root.
     *
     * @param node the parent node, or null to target the root
     * @param value the value
     * @return the new node, or null if insertion was not possible
     */
    Node insertLeft(Node node, T value) {
        if (node == null) {
            if (size > 0) {
                return null; // non-empty tree but no target node: refuse
            }
            size++;
            return root = new Node(value);
        } else {
            return node.insertLeft(value);
        }
    }

    /**
     * Inserts a value as the right child of the root (or as the root of an empty tree).
     *
     * @param value the value
     * @return the new node, or null if the slot is already occupied
     */
    public Node insertRight(T value) {
        return insertRight(root, value);
    }

    /**
     * Inserts a value as the right child of the given node. A null node is only
     * accepted when the tree is empty, in which case the value becomes the root.
     *
     * @param node the parent node, or null to target the root
     * @param value the value
     * @return the new node, or null if insertion was not possible
     */
    Node insertRight(Node node, T value) {
        if (node == null) {
            if (size > 0) {
                return null; // non-empty tree but no target node: refuse
            }
            size++;
            return root = new Node(value);
        } else {
            return node.insertRight(value);
        }
    }

    /**
     * @return true if the tree contains no nodes
     */
    public boolean isEmpty() {
        return size == 0;
    }

    /**
     * @return the root's left child, or null if the tree is empty
     */
    public Node left() {
        return root != null ? root.left : null;
    }

    /**
     * Detaches the root's left subtree.
     *
     * @return the detached left child, or null if there was none
     */
    public Node removeLeft() {
        return root != null ? root.removeLeft() : null;
    }

    /**
     * Detaches the root's right subtree.
     *
     * @return the detached right child, or null if there was none
     */
    public Node removeRight() {
        return root != null ? root.removeRight() : null;
    }

    /**
     * @return the root's right child, or null if the tree is empty
     */
    public Node right() {
        return root != null ? root.right : null;
    }

    /**
     * Opens a Swing window visualizing this tree as a JTree.
     */
    public void show() {
        JFrame f = new JFrame("A N-ary Tree vizualized with JTree");
        f.setSize(800, 600);
        f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        f.setLocationRelativeTo(null);
        if (root != null) {
            f.getContentPane().add(new JTree(root.toTreeNode()));
        }
        // Show the frame
        f.setVisible(true);
    }

    /**
     * @return the recorded number of nodes (see the NOTE on the size field)
     */
    public int size() {
        return size;
    }

    @Override
    public String toString() {
        return "BinaryTree{" + (root != null ? "root=" + root + ", size=" + size : "size=0") + '}';
    }

    /**
     * A single tree node holding a value and up to two children. Inner (non-static)
     * class: each node updates the enclosing tree's size counter on insertion.
     */
    public class Node {
        // Left and right children; null when absent.
        Node left,
             right;
        /**
         * The value stored at this node.
         */
        public T value;

        /**
         * Creates a node holding the given value.
         *
         * @param value the value
         */
        Node(T value) {
            this.value = value;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            Node node = (Node) o;
            if (left != null ? !left.equals(node.left) : node.left != null) return false;
            if (right != null ? !right.equals(node.right) : node.right != null) return false;
            return value != null ? value.equals(node.value) : node.value == null;
        }

        @Override
        public int hashCode() {
            int result = left != null ? left.hashCode() : 0;
            result = 31 * result + (right != null ? right.hashCode() : 0);
            result = 31 * result + (value != null ? value.hashCode() : 0);
            return result;
        }

        /**
         * Inserts a value as this node's left child.
         *
         * @param value the value
         * @return the new node, or null if a left child already exists
         */
        public Node insertLeft(T value) {
            if (left != null) {
                return null;
            }
            size++; // updates the enclosing tree's counter
            return left = new Node(value);
        }

        /**
         * Inserts a value as this node's right child.
         *
         * @param value the value
         * @return the new node, or null if a right child already exists
         */
        public Node insertRight(T value) {
            if (right != null) {
                return null;
            }
            size++; // updates the enclosing tree's counter
            return right = new Node(value);
        }

        /**
         * @return true if this node has no children
         */
        boolean isLeaf() {
            return left == null && right == null;
        }

        /**
         * @return this node's left child, or null
         */
        public Node left() {
            return left;
        }

        /**
         * Detaches this node's left child, first recursively detaching that
         * child's own left spine. NOTE(review): the right subtree of the
         * detached child is left attached to it — confirm this asymmetry is
         * intended.
         *
         * @return the detached left child, or null if there was none
         */
        public Node removeLeft() {
            Node ret = null;
            if (left != null) {
                left.removeLeft();
                ret = left;
                left = null;
            }
            return ret;
        }

        /**
         * Detaches this node's right child, first recursively detaching that
         * child's own right spine (mirror of {@link #removeLeft()}).
         *
         * @return the detached right child, or null if there was none
         */
        public Node removeRight() {
            Node ret = null;
            if (right != null) {
                right.removeRight();
                ret = right;
                right = null;
            }
            return ret;
        }

        /**
         * @return this node's right child, or null
         */
        public Node right() {
            return right;
        }

        @Override
        public String toString() {
            return "Node{" + "value=" + value + ", left=" + (left != null ? left : "null") + ", right=" + (right != null ? right : "null") + "}";
        }

        /**
         * Converts this subtree into a Swing tree model node for visualization.
         *
         * @return the equivalent DefaultMutableTreeNode
         */
        public DefaultMutableTreeNode toTreeNode() {
            DefaultMutableTreeNode node = new DefaultMutableTreeNode();
            node.setUserObject(value);
            if (left != null) {
                node.add(left.toTreeNode());
            }
            if (right != null) {
                node.add(right.toTreeNode());
            }
            return node;
        }
    }
}
/**
 */
package guizmo.structure.impl;

import guizmo.structure.Linkable;
import guizmo.structure.StructurePackage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;

// EMF-generated model implementation. All members carry the @generated tag:
// hand edits here will be overwritten on regeneration unless the tag is
// changed to @generated NOT.
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Linkable</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * <ul>
 *   <li>{@link guizmo.structure.impl.LinkableImpl#isIsHyperlink <em>Is Hyperlink</em>}</li>
 *   <li>{@link guizmo.structure.impl.LinkableImpl#getUri <em>Uri</em>}</li>
 * </ul>
 * </p>
 *
 * @generated
 */
public abstract class LinkableImpl extends EObjectImpl implements Linkable {
	/**
	 * The default value of the '{@link #isIsHyperlink() <em>Is Hyperlink</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #isIsHyperlink()
	 * @generated
	 * @ordered
	 */
	protected static final boolean IS_HYPERLINK_EDEFAULT = false;

	/**
	 * The cached value of the '{@link #isIsHyperlink() <em>Is Hyperlink</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #isIsHyperlink()
	 * @generated
	 * @ordered
	 */
	protected boolean isHyperlink = IS_HYPERLINK_EDEFAULT;

	/**
	 * The default value of the '{@link #getUri() <em>Uri</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getUri()
	 * @generated
	 * @ordered
	 */
	protected static final String URI_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getUri() <em>Uri</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getUri()
	 * @generated
	 * @ordered
	 */
	protected String uri = URI_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected LinkableImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return StructurePackage.Literals.LINKABLE;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isIsHyperlink() {
		return isHyperlink;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setIsHyperlink(boolean newIsHyperlink) {
		boolean oldIsHyperlink = isHyperlink;
		isHyperlink = newIsHyperlink;
		// Notify EMF adapters (e.g. editors) of the attribute change.
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, StructurePackage.LINKABLE__IS_HYPERLINK, oldIsHyperlink, isHyperlink));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getUri() {
		return uri;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setUri(String newUri) {
		String oldUri = uri;
		uri = newUri;
		// Notify EMF adapters (e.g. editors) of the attribute change.
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, StructurePackage.LINKABLE__URI, oldUri, uri));
	}

	/**
	 * Reflective feature read used by the EMF runtime.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case StructurePackage.LINKABLE__IS_HYPERLINK:
				return isIsHyperlink();
			case StructurePackage.LINKABLE__URI:
				return getUri();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * Reflective feature write used by the EMF runtime.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case StructurePackage.LINKABLE__IS_HYPERLINK:
				setIsHyperlink((Boolean)newValue);
				return;
			case StructurePackage.LINKABLE__URI:
				setUri((String)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * Resets a feature to its default value (reflective, used by the EMF runtime).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case StructurePackage.LINKABLE__IS_HYPERLINK:
				setIsHyperlink(IS_HYPERLINK_EDEFAULT);
				return;
			case StructurePackage.LINKABLE__URI:
				setUri(URI_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * Reports whether a feature differs from its default value (reflective).
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case StructurePackage.LINKABLE__IS_HYPERLINK:
				return isHyperlink != IS_HYPERLINK_EDEFAULT;
			case StructurePackage.LINKABLE__URI:
				return URI_EDEFAULT == null ? uri != null : !URI_EDEFAULT.equals(uri);
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (isHyperlink: ");
		result.append(isHyperlink);
		result.append(", uri: ");
		result.append(uri);
		result.append(')');
		return result.toString();
	}

} //LinkableImpl
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.xml;

import com.intellij.codeInsight.actions.OptimizeImportsProcessor;
import com.intellij.codeInsight.daemon.impl.analysis.XmlUnusedNamespaceInspection;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.htmlInspections.XmlInspectionToolProvider;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.javaee.ExternalResourceManagerExImpl;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.testFramework.fixtures.LightCodeInsightFixtureTestCase;
import org.jetbrains.annotations.NotNull;

/**
 * Tests for {@link XmlUnusedNamespaceInspection}: highlighting of unused xmlns
 * declarations / schema locations, the remove-declaration quick fix, and the
 * "optimize imports" processor for XML. The fixture strings embed expected
 * highlighting via {@code <warning>}/{@code <error>}/{@code <info>} tags and
 * caret position via {@code <caret>}.
 *
 * @author Dmitry Avdeev
 */
public class XmlNamespacesTest extends LightCodeInsightFixtureTestCase {

  public void testUnusedNamespaces() {
    doUnusedDeclarationTest(
      "<all xmlns=\"http://www.w3.org/2001/XMLSchema\" <warning descr=\"Namespace declaration is never used\">xmlns:xsi=\"http://www.w3.org/2001/XMLSc<caret>hema-instance\"</warning>/>",
      "<all xmlns=\"http://www.w3.org/2001/XMLSchema\"/>",
      XmlBundle.message("xml.inspections.unused.schema.remove"));
  }

  public void testUnusedDefaultNamespace() {
    doUnusedDeclarationTest("<schema:schema \n" +
                            " xmlns:schema=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " <warning descr=\"Namespace declaration is never used\">xmlns=\"http://www.w3.org/2001/X<caret>Include\"</warning>\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            " <warning descr=\"Namespace location is never used\">xsi:noNamespaceSchemaLocation=\"http://www.w3.org/2001/XInclude\"</warning>>\n" +
                            "</schema:schema>",

                            "<schema:schema\n" +
                            " xmlns:schema=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            ">\n" +
                            "</schema:schema>",
                            XmlBundle.message("xml.inspections.unused.schema.remove"), false);

    // Optimize imports on the fixed text should be a no-op.
    doOptimizeImportsTest("<schema:schema \n" +
                          " xmlns:schema=\"http://www.w3.org/2001/XMLSchema\"\n" +
                          " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                          ">\n" +
                          "</schema:schema>");
  }

  public void testDifferentPrefixes() {
    doUnusedDeclarationTest(
      "<x:all <warning descr=\"Namespace declaration is never used\">xmlns=\"http://www.w3.org/2001/XMLS<caret>chema\"</warning>\n" +
      " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
      " <warning descr=\"Namespace declaration is never used\">xmlns:y=\"http://www.w3.org/2001/XMLSchema\"</warning>/>",

      "<x:all\n" +
      " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
      " xmlns:y=\"http://www.w3.org/2001/XMLSchema\"/>",
      XmlBundle.message("xml.inspections.unused.schema.remove"), false);

    // Optimize imports removes the remaining unused declaration as well.
    doOptimizeImportsTest("<x:all\n" +
                          " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                          "/>");
  }

  public void testUnusedLocation() {
    doUnusedDeclarationTest("<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            " xsi:schemaLocation=\"<warning descr=\"Namespace location is never used\">http://www.w3.org/2001/XML<caret>Sche</warning> " +
                            "<warning descr=\"Namespace location is never used\">http://www.w3.org/2001/XMLSchema.xsd</warning>\"/>",

                            "<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            "/>",
                            XmlUnusedNamespaceInspection.RemoveNamespaceLocationFix.NAME);
  }

  public void testUnusedLocationOnly() {
    doUnusedDeclarationTest("<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " <warning descr=\"Namespace declaration is never used\">xmlns:y=\"http://www.w3.org/2001/XInclude\"</warning>\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            " xsi:schemaLocation=\"<warning descr=\"Namespace location is never used\">http://www.w3.org/2001/XI<caret>nclude</warning> <warning descr=\"Namespace location is never used\">http://www.w3.org/2001/XInclude.xsd</warning>\n" +
                            " http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd\"/>",

                            "<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            " xsi:schemaLocation=\"http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd\"/>",
                            XmlBundle.message("xml.inspections.unused.schema.remove"));
  }

  public void testUnusedDefaultLocation() {
    doUnusedDeclarationTest("<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            " <warning descr=\"Namespace location is never used\">xsi:noNamespaceSc<caret>hemaLocation=\"<error descr=\"Cannot resolve file 'zzz'\">zzz</error>\"</warning> />",

                            "<x:all\n" +
                            " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                            " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                            "/>",
                            XmlUnusedNamespaceInspection.RemoveNamespaceLocationFix.NAME);
  }

  public void testKeepFormatting() {
    // The quick fix must not reformat untouched parts of the document.
    doUnusedDeclarationTest("<xs:schema attributeFormDefault=\"unqualified\"\n" +
                            " <warning descr=\"Namespace declaration is never used\">xmlns:xsi=\"http://www.w3.org/20<caret>01/XMLSchema-instance\"</warning>\n" +
                            " elementFormDefault=\"qualified\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                            "\n" +
                            " <!-- keep formatting here-->\n" +
                            " <xs:element name=\"a\" type=\"aType\"/>\n" +
                            " <xs:complexType name=\"aType\">\n" +
                            "\n" +
                            " </xs:complexType>\n" +
                            "</xs:schema>",

                            "<xs:schema attributeFormDefault=\"unqualified\"\n" +
                            " elementFormDefault=\"qualified\" xmlns:xs=\"http://www.w3.org/2001/XMLSchema\">\n" +
                            "\n" +
                            " <!-- keep formatting here-->\n" +
                            " <xs:element name=\"a\" type=\"aType\"/>\n" +
                            " <xs:complexType name=\"aType\">\n" +
                            "\n" +
                            " </xs:complexType>\n" +
                            "</xs:schema>",
                            XmlBundle.message("xml.inspections.unused.schema.remove"));
  }

  public void testImplicitPrefixUsage() {
    // x2 is referenced from an attribute value ("x2:string") and must not be flagged.
    myFixture.configureByText("a.xml", "<xs:schema xmlns:xs=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                       " xmlns:x2=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                       " <warning descr=\"Namespace declaration is never used\">xmlns:x3=\"http://www.w3.org/2001/XMLSchema\"</warning> >\n" +
                                       " <xs:element name=\"a\" type=\"x2:string\"/>\n" +
                                       "</xs:schema>");
    myFixture.testHighlighting();
  }

  public void testUnusedLocationDetection() {
    myFixture.configureByFile("web-app_2_5.xsd");
    myFixture.configureByText("a.xml", "<web-app xmlns=\"http://java.sun.com/xml/ns/javaee\"\n" +
                                       " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                                       " <warning descr=\"Namespace declaration is never used\">xmlns:web=\"http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd\"</warning>\n" +
                                       " xsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee\n" +
                                       " web-app_2_5.xsd\"\n" +
                                       " version=\"2.5\">\n" +
                                       "</web-app>");
    myFixture.testHighlighting();
  }

  public void testWSDD() {
    // The "java:" prefix is used inside an attribute value; no unused warning expected.
    myFixture.configureByText("a.xml", "<deployment xmlns=\"http://xml.apache.org/axis/wsdd/\" xmlns:java=\"http://xml.apache.org/axis/wsdd/providers/java\">\n" +
                                       "<typeMapping deserializer=\"org.apache.axis.encoding.ser.BeanDeserializerFactory\" encodingStyle=\"\" qname=\"ns38:AxisAnalysis\" serializer=\"org.apache.axis.encoding.ser.BeanSerializerFactory\" languageSpecificType=\"java:com.pls.xactservice.axis.bindings.AxisAnalysis\"/>\n" +
                                       "</deployment>");
    myFixture.testHighlighting();
  }

  public void testPrefixesInTagValues() {
    // Prefixes referenced from element text ("nt:folder") count as usages.
    myFixture.configureByText("a.xml", "<<info descr=\"Namespace '' is not bound\">nodeTypes</info> xmlns:nt=\"<error descr=\"URI is not registered (Settings | Languages & Frameworks | Schemas and DTDs)\">http://www.jcp.org/jcr/nt/1.0</error>\" xmlns:customns=\"<error descr=\"URI is not registered (Settings | Languages & Frameworks | Schemas and DTDs)\">http://customurl</error>\">\n" +
                                       "<nodeType name=\"customns:item\" isMixin=\"false\" hasOrderableChildNodes=\"false\">\n" +
                                       " <supertypes>\n" +
                                       " <supertype>nt:folder</supertype>\n" +
                                       " </supertypes>\n" +
                                       "</nodeType>\n" +
                                       "</<info descr=\"Namespace '' is not bound\">nodeTypes</info>>");
    myFixture.testHighlighting();
  }

  public void testLocallyUsedNamespace() {
    // A namespace re-declared (and used) on a nested element keeps the
    // top-level schemaLocation pair from being reported as unused.
    myFixture.configureByText("a.xml", "<x:all\n" +
                                       " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                       " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                                       " xsi:schemaLocation=\"http://www.w3.org/2001/XMLSchema http://www.w3.org/2001/XMLSchema.xsd\n" +
                                       " http://www.w3.org/2001/XInclude http://www.w3.org/2001/XInclude.xsd\">\n" +
                                       "\n" +
                                       " <include xmlns=\"http://www.w3.org/2001/XInclude\" href=\"a.xml\"/>\n" +
                                       "</x:all>");
    myFixture.testHighlighting();
  }

  public void testLocallyUsedNamespaceWithPrefix() {
    myFixture.configureByText("a.xml", "<s:foo xmlns:s=\"<error descr=\"URI is not registered (Settings | Languages & Frameworks | Schemas and DTDs)\">http://foo</error>\"\n" +
                                       " <warning descr=\"Namespace declaration is never used\">xmlns:bar=\"<error descr=\"URI is not registered (Settings | Languages & Frameworks | Schemas and DTDs)\">http://bar</error>\"</warning>\n" +
                                       " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                                       " xsi:schemaLocation=\"http://bar <error descr=\"Cannot resolve file 'bar.xsd'\">bar.xsd</error> http://foo <error descr=\"Cannot resolve file 'foo.xsd'\">foo.xsd</error>\">\n" +
                                       "\n" +
                                       " <bar xmlns=\"<error descr=\"URI is not registered (Settings | Languages & Frameworks | Schemas and DTDs)\">http://bar</error>\"/>\n" +
                                       "\n" +
                                       "</s:foo>");
    myFixture.testHighlighting();
  }

  public void testSubDirectory() {
    myFixture.testHighlighting("moved.xml", "trg/move-def.xsd");
  }

  public void testSuppressedOptimize() {
    // A suppression comment must keep optimize-imports from touching the file.
    myFixture.configureByFile("web-app_2_5.xsd");
    String text = "<!--suppress XmlUnusedNamespaceDeclaration -->\n" +
                  "<web-app xmlns=\"http://java.sun.com/xml/ns/javaee\"\n" +
                  " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
                  " xsi:schemaLocation=\"http://java.sun.com/xml/ns/javaee\n" +
                  " web-app_2_5.xsd\"\n" +
                  " version=\"2.5\">\n" +
                  "</web-app>";
    myFixture.configureByText("a.xml", text);
    doOptimizeImportsTest(text);
  }

  public void testUsedInXmlns() {
    myFixture.testHighlighting("spring.xml", "spring-beans-2.5.xsd", "spring-batch-2.1.xsd");
    IntentionAction action = myFixture.getAvailableIntention(XmlBundle.message("xml.inspections.unused.schema.remove"));
    assertNotNull(action);
    myFixture.launchAction(action);
    myFixture.checkResultByFile("spring_after.xml");
  }

  public void testXsiType() {
    myFixture.testHighlighting("import.xml", "import.xsd");
  }

  public void testDoNotOptimizeWhenInspectionDisabled() {
    myFixture.disableInspections(new XmlUnusedNamespaceInspection());
    String text = "<all xmlns=\"http://www.w3.org/2001/XMLSchema\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"/>";
    myFixture.configureByText(XmlFileType.INSTANCE, text);
    doOptimizeImportsTest(text);
  }

  public void testFixAll() {
    myFixture.configureByFiles("fixAll.xml", "spring-beans-2.5.xsd", "spring-batch-2.1.xsd");
    IntentionAction action = myFixture.findSingleIntention("Fix all");
    assertNotNull(action);
    myFixture.launchAction(action);
    myFixture.checkResultByFile("fixAll_after.xml");
  }

  public void testImplicitPrefixes() {
    // x is referenced from a default-value attribute ("x:y") and must not be flagged.
    myFixture.configureByText(XmlFileType.INSTANCE, "<schema xmlns=\"http://www.w3.org/2001/XMLSchema\" \n" +
                                                    " xmlns:x=\"http://www.w3.org/2001/XMLSchema\"\n" +
                                                    " <warning descr=\"Namespace declaration is never used\">xmlns:y=\"http://www.w3.org/2001/XMLSchema\"</warning>>\n" +
                                                    " <element name=\"a\" default=\"x:y\"/>\n" +
                                                    "</schema>");
    myFixture.testHighlighting();
  }

  /**
   * Runs the declaration test and then also verifies optimize imports on the
   * expected result (testOptimizeImports defaults to true).
   */
  private void doUnusedDeclarationTest(String text, String after, String name) {
    doUnusedDeclarationTest(text, after, name, true);
  }

  /**
   * Checks highlighting of {@code text}, applies the quick fix called
   * {@code name}, verifies the result equals {@code after}, then optionally
   * re-opens the original text and checks that optimize imports produces
   * {@code after} as well.
   */
  private void doUnusedDeclarationTest(String text, String after, String name, boolean testOptimizeImports) {
    myFixture.configureByText("a.xml", text);
    myFixture.testHighlighting();
    IntentionAction action = myFixture.getAvailableIntention(name);
    assertNotNull(name + " not found", action);
    myFixture.launchAction(action);
    myFixture.checkResult(after);

    // Reset to the original text so the optimize-imports pass starts fresh.
    myFixture.configureByText("a.xml", text);
    if (testOptimizeImports) {
      doOptimizeImportsTest(after);
    }
  }

  /**
   * Runs OptimizeImportsProcessor on the currently configured file (inside a
   * write command) and verifies the result equals {@code after}.
   */
  private void doOptimizeImportsTest(String after) {
    myFixture.testHighlighting();
    new WriteCommandAction(getProject(), getFile()) {
      @Override
      protected void run(@NotNull Result result) {
        new OptimizeImportsProcessor(getProject(), getFile()).runWithoutProgress();
      }
    }.execute();
    myFixture.checkResult(after);
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    myFixture.enableInspections(new XmlInspectionToolProvider());
    // Map the external schema/DTD URIs used by the fixtures onto local test-data
    // copies so highlighting does not depend on network access.
    ExternalResourceManagerExImpl.registerResourceTemporarily("http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd", getTestDataPath() + "/web-app_2_5.xsd", myFixture.getTestRootDisposable());
    ExternalResourceManagerExImpl.registerResourceTemporarily("http://xml.apache.org/axis/wsdd/", getTestDataPath() + "/wsdd.dtd", myFixture.getTestRootDisposable());
    ExternalResourceManagerExImpl.registerResourceTemporarily("http://xml.apache.org/axis/wsdd/providers/java", getTestDataPath() + "/wsdd_provider_java.xsd", myFixture.getTestRootDisposable());
  }

  @Override
  protected String getBasePath() {
    return "/xml/tests/testData/unusedNs";
  }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.soen.hasslefree.models;

import com.soen.hasslefree.dao.ObjectDao;
import com.soen.hasslefree.persistence.HibernateUtil;
import java.io.Serializable;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.annotations.Type;
import org.joda.time.DateTime;
import org.joda.time.MutableDateTime;

/**
 * Hibernate entity describing one contiguous window of time during which a
 * physician is available. On save the window is sliced into fixed-length
 * {@link PhysicianTimeSlot}s (sized by the "drop" appointment type), each of
 * which is paired with a free {@link RoomTimeSlot} starting at the same time.
 *
 * @author Khalid
 */
@Entity
@Table
public class PhysicianAvailability implements Serializable {

    @Id
    @GeneratedValue
    private long physicianAvailabilityID;

    @ManyToOne(cascade = CascadeType.ALL)
    private Physician relatedPhysician;

    @Column
    @Type(type = "org.joda.time.contrib.hibernate.PersistentDateTime")
    private DateTime startTime;

    @Column
    @Type(type = "org.joda.time.contrib.hibernate.PersistentDateTime")
    private DateTime endTime;

    // Eagerly fetched so slots survive after the session that loaded the entity is closed.
    @OneToMany(mappedBy = "physicianAvailability", fetch = FetchType.EAGER, cascade = CascadeType.ALL)
    private Set<PhysicianTimeSlot> physicianTimeSlots = new HashSet<PhysicianTimeSlot>(0);

    public long getPhysicianAvailabilityID() {
        return physicianAvailabilityID;
    }

    public void setPhysicianAvailabilityID(long physicianAvailabilityID) {
        this.physicianAvailabilityID = physicianAvailabilityID;
    }

    public Physician getRelatedPhysician() {
        return relatedPhysician;
    }

    public void setRelatedPhysician(Physician relatedPhysician) {
        this.relatedPhysician = relatedPhysician;
    }

    public DateTime getStartTime() {
        return startTime;
    }

    public void setStartTime(DateTime startTime) {
        this.startTime = startTime;
    }

    public DateTime getEndTime() {
        return endTime;
    }

    public void setEndTime(DateTime endTime) {
        this.endTime = endTime;
    }

    public Set<PhysicianTimeSlot> getPhysicianTimeSlots() {
        return physicianTimeSlots;
    }

    public void setPhysicianTimeSlots(Set<PhysicianTimeSlot> physicianTimeSlots) {
        this.physicianTimeSlots = physicianTimeSlots;
    }

    /**
     * Persists this availability by generating its time slots. The slot length
     * comes from the "drop" (drop-in) appointment type.
     *
     * @return {@code true} if slots were generated and the entity was saved;
     *         {@code false} when the drop-in type is missing or no room slot
     *         could be matched.
     */
    public boolean savePhysicianAvailability() {
        boolean hasCommted = false;
        AppointmentType dropIn = AppointmentType.searchForAppointmentType("drop");
        if (dropIn != null) {
            hasCommted = generateTimeSlots(this.startTime, this.endTime, dropIn.getDuration());
        }
        return hasCommted;
    }

    /** Updates the persisted state of this availability via the generic DAO. */
    public void updatePhysicianAvailability() throws IllegalAccessException, InvocationTargetException {
        ObjectDao physicianAvailabilityDao = new ObjectDao();
        physicianAvailabilityDao.updateObject(this, this.getPhysicianAvailabilityID(), PhysicianAvailability.class);
    }

    /** Deletes this availability (and, via cascade, its slots) through the generic DAO. */
    public void deletePhysicianAvailability() throws IllegalAccessException, InvocationTargetException {
        ObjectDao physicianAvailabilityDao = new ObjectDao();
        physicianAvailabilityDao.deleteObject(this, this.getPhysicianAvailabilityID(), PhysicianAvailability.class);
    }

    /**
     * Loads one availability by primary key.
     *
     * @return the entity, or {@code null} if not found or on a Hibernate error.
     */
    public static PhysicianAvailability getPhysicianAvailabilityById(long id) {
        PhysicianAvailability physicianAvailabilityHolder = null;
        Session session = null;
        try {
            session = HibernateUtil.getSessionFactory().openSession();
            physicianAvailabilityHolder = (PhysicianAvailability) session.get(PhysicianAvailability.class, id);
        } catch (HibernateException e) {
            // Best-effort read: log and fall through to returning null.
            e.printStackTrace();
        } finally {
            if (session != null && session.isOpen()) {
                session.close();
            }
        }
        return physicianAvailabilityHolder;
    }

    /** @return every persisted availability, for all physicians. */
    public static ArrayList<PhysicianAvailability> getAllPhysicianAvailabilities() {
        ArrayList<PhysicianAvailability> physicianAvailabilities;
        ObjectDao physicianAvailabilityDao = new ObjectDao();
        physicianAvailabilities = physicianAvailabilityDao.getAllObjects("PhysicianAvailability");
        return physicianAvailabilities;
    }

    /**
     * Returns the availabilities belonging to the physician with the given user id.
     *
     * @return a (possibly empty) list; never {@code null}.
     */
    public static ArrayList<PhysicianAvailability> getAllPhysicianAvailabilitiesforId(long id) {
        ArrayList<PhysicianAvailability> physicianAvailabilities;
        // BUG FIX: this list was previously initialized to null, which caused a
        // NullPointerException on the first matching availability and returned
        // null when nothing matched. Start with an empty list instead.
        ArrayList<PhysicianAvailability> filteredAvailabilities = new ArrayList<PhysicianAvailability>();
        ObjectDao physicianAvailabilityDao = new ObjectDao();
        physicianAvailabilities = physicianAvailabilityDao.getAllObjects("PhysicianAvailability");
        for (PhysicianAvailability availability : physicianAvailabilities) {
            if (availability.getRelatedPhysician().getUserId() == id) {
                filteredAvailabilities.add(availability);
            }
        }
        return filteredAvailabilities;
    }

    /**
     * Slices [startTime, endTime) into drop-in-length physician time slots,
     * pairing each with a free room slot that starts at the same instant, then
     * persists this availability with its slots.
     *
     * @param startTime               window start
     * @param endTime                 window end
     * @param dropInDurationInMinutes slot length in minutes
     * @return {@code true} when slots were generated and saved; {@code false}
     *         when the window is empty or a slot start has no free room.
     */
    public boolean generateTimeSlots(DateTime startTime, DateTime endTime, int dropInDurationInMinutes) {
        MutableDateTime slotStatTime = new MutableDateTime();
        MutableDateTime slotEndTime = new MutableDateTime();
        long availabilityStartTimeInMillis = startTime.getMillis();
        // BUG FIX: this previously read startTime.getMillis() as well, making
        // availableDuration always zero so no slot was ever generated.
        long availabilityEndTimeInMillis = endTime.getMillis();
        long availableDuration = availabilityEndTimeInMillis - availabilityStartTimeInMillis;
        long slotDuration = dropInDurationInMinutes * 60L * 1000L; // minutes -> millis (long math avoids int overflow)

        ArrayList<RoomTimeSlot> roomSlots = RoomTimeSlot.getFilteredAvailableRoomSlotsForDate(startTime, endTime);
        if (availableDuration > 0) {
            long currentSlotStartTime = availabilityStartTimeInMillis;
            boolean stopSlicing = false;
            while (!stopSlicing) {
                int roomSlotIndex = hasFoundFreeRoomSlot(currentSlotStartTime, roomSlots);
                if (roomSlotIndex < 0) {
                    // No room free at this start time: abort without saving anything.
                    return false;
                } else {
                    PhysicianTimeSlot newTimeSlot = new PhysicianTimeSlot();
                    slotStatTime.setMillis(currentSlotStartTime);
                    slotEndTime.setMillis(currentSlotStartTime + slotDuration);
                    newTimeSlot.setStartTime(slotStatTime.toDateTime());
                    newTimeSlot.setEndTime(slotEndTime.toDateTime());
                    newTimeSlot.setIsAvailable(true);
                    newTimeSlot.setPhysicianAvailability(this);
                    newTimeSlot.setRelatedPhysician(this.relatedPhysician);
                    RoomTimeSlot roomTime = roomSlots.get(roomSlotIndex);
                    newTimeSlot.setRelatedRoomTimeSlot(roomTime);
                    roomTime.setPhysicianTimeSlot(newTimeSlot);
                    roomTime.setIsAvailable(false);
                    this.physicianTimeSlots.add(newTimeSlot);
                    availableDuration = availableDuration - slotDuration;
                    currentSlotStartTime = currentSlotStartTime + slotDuration;
                    // Strict < (not <=) on purpose: a final slot exactly filling
                    // the remainder is still added before slicing stops.
                    if (availableDuration < slotDuration) {
                        stopSlicing = true;
                    }
                }
            }
        }
        ObjectDao physicianAvailabilityDao = new ObjectDao();
        reserveRoomSlot(roomSlots);
        physicianAvailabilityDao.addOrUpdateObject(this);
        return true;
    }

    /**
     * Persists the updated state of each room slot claimed above.
     *
     * @return {@code true} once every slot update has been issued.
     *         (BUG FIX: previously always returned {@code false}.)
     */
    public boolean reserveRoomSlot(ArrayList<RoomTimeSlot> roomSlots) {
        for (RoomTimeSlot roomSlot : roomSlots) {
            roomSlot.updateRoomTimeSlot();
        }
        return true;
    }

    /**
     * Finds the index of a room slot whose start equals the given instant.
     * Scans the whole list, so with duplicates the last match wins (kept as-is
     * to preserve existing pairing behavior).
     *
     * @return the matching index, or -1 if none.
     */
    private int hasFoundFreeRoomSlot(long startSlotTimeInMillis, ArrayList<RoomTimeSlot> roomSlots) {
        int index = -1;
        for (int i = 0; i < roomSlots.size(); i++) {
            RoomTimeSlot roomSlot = roomSlots.get(i);
            if (roomSlot.getStartTime().getMillis() == startSlotTimeInMillis) {
                index = i;
            }
        }
        return index;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.testdata; /** */ public class WordCountData { public static final String TEXT = "Goethe - Faust: Der Tragoedie erster Teil\n" + "Prolog im Himmel.\n" + "Der Herr. Die himmlischen Heerscharen. Nachher Mephistopheles. Die drei\n" + "Erzengel treten vor.\n" + "RAPHAEL: Die Sonne toent, nach alter Weise, In Brudersphaeren Wettgesang,\n" + "Und ihre vorgeschriebne Reise Vollendet sie mit Donnergang. Ihr Anblick\n" + "gibt den Engeln Staerke, Wenn keiner Sie ergruenden mag; die unbegreiflich\n" + "hohen Werke Sind herrlich wie am ersten Tag.\n" + "GABRIEL: Und schnell und unbegreiflich schnelle Dreht sich umher der Erde\n" + "Pracht; Es wechselt Paradieseshelle Mit tiefer, schauervoller Nacht. Es\n" + "schaeumt das Meer in breiten Fluessen Am tiefen Grund der Felsen auf, Und\n" + "Fels und Meer wird fortgerissen Im ewig schnellem Sphaerenlauf.\n" + "MICHAEL: Und Stuerme brausen um die Wette Vom Meer aufs Land, vom Land\n" + "aufs Meer, und bilden wuetend eine Kette Der tiefsten Wirkung rings umher.\n" + "Da flammt ein blitzendes Verheeren Dem Pfade vor des Donnerschlags. 
Doch\n" + "deine Boten, Herr, verehren Das sanfte Wandeln deines Tags.\n" + "ZU DREI: Der Anblick gibt den Engeln Staerke, Da keiner dich ergruenden\n" + "mag, Und alle deine hohen Werke Sind herrlich wie am ersten Tag.\n" + "MEPHISTOPHELES: Da du, o Herr, dich einmal wieder nahst Und fragst, wie\n" + "alles sich bei uns befinde, Und du mich sonst gewoehnlich gerne sahst, So\n" + "siehst du mich auch unter dem Gesinde. Verzeih, ich kann nicht hohe Worte\n" + "machen, Und wenn mich auch der ganze Kreis verhoehnt; Mein Pathos braechte\n" + "dich gewiss zum Lachen, Haettst du dir nicht das Lachen abgewoehnt. Von\n" + "Sonn' und Welten weiss ich nichts zu sagen, Ich sehe nur, wie sich die\n" + "Menschen plagen. Der kleine Gott der Welt bleibt stets von gleichem\n" + "Schlag, Und ist so wunderlich als wie am ersten Tag. Ein wenig besser\n" + "wuerd er leben, Haettst du ihm nicht den Schein des Himmelslichts gegeben;\n" + "Er nennt's Vernunft und braucht's allein, Nur tierischer als jedes Tier\n" + "zu sein. Er scheint mir, mit Verlaub von euer Gnaden, Wie eine der\n" + "langbeinigen Zikaden, Die immer fliegt und fliegend springt Und gleich im\n" + "Gras ihr altes Liedchen singt; Und laeg er nur noch immer in dem Grase! In\n" + "jeden Quark begraebt er seine Nase.\n" + "DER HERR: Hast du mir weiter nichts zu sagen? Kommst du nur immer\n" + "anzuklagen? Ist auf der Erde ewig dir nichts recht?\n" + "MEPHISTOPHELES: Nein Herr! ich find es dort, wie immer, herzlich\n" + "schlecht. Die Menschen dauern mich in ihren Jammertagen, Ich mag sogar\n" + "die armen selbst nicht plagen.\n" + "DER HERR: Kennst du den Faust?\n" + "MEPHISTOPHELES: Den Doktor?\n" + "DER HERR: Meinen Knecht!\n" + "MEPHISTOPHELES: Fuerwahr! er dient Euch auf besondre Weise. Nicht irdisch\n" + "ist des Toren Trank noch Speise. 
Ihn treibt die Gaerung in die Ferne, Er\n" + "ist sich seiner Tollheit halb bewusst; Vom Himmel fordert er die schoensten\n" + "Sterne Und von der Erde jede hoechste Lust, Und alle Naeh und alle Ferne\n" + "Befriedigt nicht die tiefbewegte Brust.\n" + "DER HERR: Wenn er mir auch nur verworren dient, So werd ich ihn bald in\n" + "die Klarheit fuehren. Weiss doch der Gaertner, wenn das Baeumchen gruent, Das\n" + "Bluet und Frucht die kuenft'gen Jahre zieren.\n" + "MEPHISTOPHELES: Was wettet Ihr? den sollt Ihr noch verlieren! Wenn Ihr\n" + "mir die Erlaubnis gebt, Ihn meine Strasse sacht zu fuehren.\n" + "DER HERR: Solang er auf der Erde lebt, So lange sei dir's nicht verboten,\n" + "Es irrt der Mensch so lang er strebt.\n" + "MEPHISTOPHELES: Da dank ich Euch; denn mit den Toten Hab ich mich niemals\n" + "gern befangen. Am meisten lieb ich mir die vollen, frischen Wangen. Fuer\n" + "einem Leichnam bin ich nicht zu Haus; Mir geht es wie der Katze mit der Maus.\n" + "DER HERR: Nun gut, es sei dir ueberlassen! Zieh diesen Geist von seinem\n" + "Urquell ab, Und fuehr ihn, kannst du ihn erfassen, Auf deinem Wege mit\n" + "herab, Und steh beschaemt, wenn du bekennen musst: Ein guter Mensch, in\n" + "seinem dunklen Drange, Ist sich des rechten Weges wohl bewusst.\n" + "MEPHISTOPHELES: Schon gut! nur dauert es nicht lange. Mir ist fuer meine\n" + "Wette gar nicht bange. Wenn ich zu meinem Zweck gelange, Erlaubt Ihr mir\n" + "Triumph aus voller Brust. Staub soll er fressen, und mit Lust, Wie meine\n" + "Muhme, die beruehmte Schlange.\n" + "DER HERR: Du darfst auch da nur frei erscheinen; Ich habe deinesgleichen\n" + "nie gehasst. Von allen Geistern, die verneinen, ist mir der Schalk am\n" + "wenigsten zur Last. Des Menschen Taetigkeit kann allzu leicht erschlaffen,\n" + "er liebt sich bald die unbedingte Ruh; Drum geb ich gern ihm den Gesellen\n" + "zu, Der reizt und wirkt und muss als Teufel schaffen. 
Doch ihr, die echten\n" + "Goettersoehne, Erfreut euch der lebendig reichen Schoene! Das Werdende, das\n" + "ewig wirkt und lebt, Umfass euch mit der Liebe holden Schranken, Und was\n" + "in schwankender Erscheinung schwebt, Befestigt mit dauernden Gedanken!\n" + "(Der Himmel schliesst, die Erzengel verteilen sich.)\n" + "MEPHISTOPHELES (allein): Von Zeit zu Zeit seh ich den Alten gern, Und\n" + "huete mich, mit ihm zu brechen. Es ist gar huebsch von einem grossen Herrn,\n" + "So menschlich mit dem Teufel selbst zu sprechen."; public static final String COUNTS = "machen 1\n" + "zeit 2\n" + "heerscharen 1\n" + "keiner 2\n" + "meine 3\n" + "fuehr 1\n" + "triumph 1\n" + "kommst 1\n" + "frei 1\n" + "schaffen 1\n" + "gesinde 1\n" + "langbeinigen 1\n" + "schalk 1\n" + "besser 1\n" + "solang 1\n" + "meer 4\n" + "fragst 1\n" + "gabriel 1\n" + "selbst 2\n" + "bin 1\n" + "sich 7\n" + "du 11\n" + "sogar 1\n" + "geht 1\n" + "immer 4\n" + "mensch 2\n" + "befestigt 1\n" + "lebt 2\n" + "mag 3\n" + "engeln 2\n" + "breiten 1\n" + "blitzendes 1\n" + "tags 1\n" + "sie 2\n" + "plagen 2\n" + "allzu 1\n" + "meisten 1\n" + "o 1\n" + "pfade 1\n" + "kennst 1\n" + "nichts 3\n" + "gedanken 1\n" + "befriedigt 1\n" + "mich 6\n" + "s 3\n" + "es 8\n" + "verneinen 1\n" + "er 13\n" + "gleich 1\n" + "baeumchen 1\n" + "donnergang 1\n" + "wunderlich 1\n" + "reise 1\n" + "urquell 1\n" + "doch 3\n" + "aufs 2\n" + "toten 1\n" + "niemals 1\n" + "eine 2\n" + "hab 1\n" + "darfst 1\n" + "da 5\n" + "gen 1\n" + "einem 2\n" + "teil 1\n" + "das 7\n" + "speise 1\n" + "wenig 1\n" + "sterne 1\n" + "geb 1\n" + "welten 1\n" + "alle 3\n" + "toent 1\n" + "gras 1\n" + "felsen 1\n" + "kette 1\n" + "ich 14\n" + "fuer 2\n" + "als 3\n" + "mein 1\n" + "schoene 1\n" + "verzeih 1\n" + "schwankender 1\n" + "wie 9\n" + "menschlich 1\n" + "gaertner 1\n" + "taetigkeit 1\n" + "bange 1\n" + "liebe 1\n" + "sei 2\n" + "seh 1\n" + "tollheit 1\n" + "am 6\n" + "michael 1\n" + "geist 1\n" + "ab 1\n" + "nahst 1\n" + "vollendet 1\n" + 
"liebt 1\n" + "brausen 1\n" + "nase 1\n" + "erlaubt 1\n" + "weiss 2\n" + "schnellem 1\n" + "deinem 1\n" + "gleichem 1\n" + "gaerung 1\n" + "dauernden 1\n" + "deines 1\n" + "vorgeschriebne 1\n" + "irdisch 1\n" + "worte 1\n" + "verehren 1\n" + "hohen 2\n" + "weise 2\n" + "kuenft 1\n" + "werdende 1\n" + "wette 2\n" + "wuetend 1\n" + "erscheinung 1\n" + "gar 2\n" + "verlieren 1\n" + "braucht 1\n" + "weiter 1\n" + "trank 1\n" + "tierischer 1\n" + "wohl 1\n" + "verteilen 1\n" + "verhoehnt 1\n" + "schaeumt 1\n" + "himmelslichts 1\n" + "unbedingte 1\n" + "herzlich 1\n" + "anblick 2\n" + "nennt 1\n" + "gruent 1\n" + "bluet 1\n" + "leichnam 1\n" + "erschlaffen 1\n" + "jammertagen 1\n" + "zieh 1\n" + "ihm 3\n" + "besondre 1\n" + "ihn 5\n" + "grossen 1\n" + "vollen 1\n" + "ihr 7\n" + "boten 1\n" + "voller 1\n" + "singt 1\n" + "muhme 1\n" + "schon 1\n" + "last 1\n" + "kleine 1\n" + "paradieseshelle 1\n" + "nein 1\n" + "echten 1\n" + "unter 1\n" + "bei 1\n" + "herr 11\n" + "gern 3\n" + "sphaerenlauf 1\n" + "stets 1\n" + "ganze 1\n" + "braechte 1\n" + "fordert 1\n" + "schoensten 1\n" + "herrlich 2\n" + "gegeben 1\n" + "allein 2\n" + "reichen 1\n" + "schauervoller 1\n" + "musst 1\n" + "recht 1\n" + "bleibt 1\n" + "pracht 1\n" + "treibt 1\n" + "befangen 1\n" + "was 2\n" + "menschen 3\n" + "jede 1\n" + "hohe 1\n" + "tiefsten 1\n" + "bilden 1\n" + "drum 1\n" + "gibt 2\n" + "guter 1\n" + "fuerwahr 1\n" + "im 3\n" + "grund 1\n" + "in 9\n" + "hoechste 1\n" + "schliesst 1\n" + "fels 1\n" + "steh 1\n" + "euer 1\n" + "erster 1\n" + "ersten 3\n" + "goettersoehne 1\n" + "brechen 1\n" + "tiefen 1\n" + "frucht 1\n" + "kreis 1\n" + "siehst 1\n" + "wege 1\n" + "ist 8\n" + "zikaden 1\n" + "frischen 1\n" + "ruh 1\n" + "deine 2\n" + "maus 1\n" + "brudersphaeren 1\n" + "nachher 1\n" + "euch 4\n" + "gnaden 1\n" + "anzuklagen 1\n" + "schlange 1\n" + "staerke 2\n" + "erde 4\n" + "verlaub 1\n" + "sanfte 1\n" + "holden 1\n" + "sonst 1\n" + "treten 1\n" + "sahst 1\n" + "alten 1\n" + "um 1\n" + "wieder 
1\n" + "alter 1\n" + "altes 1\n" + "nun 1\n" + "lieb 1\n" + "gesellen 1\n" + "erscheinen 1\n" + "wirkt 2\n" + "haettst 2\n" + "nur 7\n" + "tiefbewegte 1\n" + "lachen 2\n" + "drange 1\n" + "schlag 1\n" + "schein 1\n" + "muss 1\n" + "verworren 1\n" + "weges 1\n" + "allen 1\n" + "gewoehnlich 1\n" + "alles 1\n" + "halb 1\n" + "stuerme 1\n" + "springt 1\n" + "sollt 1\n" + "klarheit 1\n" + "so 6\n" + "erfassen 1\n" + "liedchen 1\n" + "prolog 1\n" + "zur 1\n" + "fressen 1\n" + "zum 1\n" + "faust 2\n" + "erzengel 2\n" + "jahre 1\n" + "sonn 1\n" + "raphael 1\n" + "land 2\n" + "lang 1\n" + "gelange 1\n" + "lust 2\n" + "welt 1\n" + "sehe 1\n" + "ihre 1\n" + "jedes 1\n" + "erfreut 1\n" + "seiner 1\n" + "denn 1\n" + "wandeln 1\n" + "wechselt 1\n" + "jeden 1\n" + "dort 1\n" + "schlecht 1\n" + "wenigsten 1\n" + "wuerd 1\n" + "schranken 1\n" + "bewusst 2\n" + "seinem 2\n" + "gehasst 1\n" + "sein 1\n" + "meinem 1\n" + "meinen 1\n" + "pathos 1\n" + "herrn 1\n" + "lange 2\n" + "herab 1\n" + "diesen 1\n" + "ihren 1\n" + "beruehmte 1\n" + "goethe 1\n" + "tag 3\n" + "tier 1\n" + "quark 1\n" + "dank 1\n" + "seine 1\n" + "teufel 2\n" + "zweck 1\n" + "wenn 7\n" + "soll 1\n" + "wirkung 1\n" + "erlaubnis 1\n" + "lebendig 1\n" + "uns 1\n" + "leicht 1\n" + "gewiss 1\n" + "schnell 1\n" + "und 29\n" + "gerne 1\n" + "rechten 1\n" + "umher 2\n" + "vernunft 1\n" + "grase 1\n" + "nach 1\n" + "leben 1\n" + "gott 1\n" + "der 29\n" + "des 5\n" + "doktor 1\n" + "beschaemt 1\n" + "dreht 1\n" + "habe 1\n" + "sagen 2\n" + "bekennen 1\n" + "dunklen 1\n" + "wettet 1\n" + "den 9\n" + "mephistopheles 9\n" + "dem 4\n" + "auch 4\n" + "kann 2\n" + "armen 1\n" + "mir 9\n" + "strebt 1\n" + "gut 2\n" + "mit 11\n" + "bald 2\n" + "himmlischen 1\n" + "himmel 3\n" + "noch 3\n" + "kannst 1\n" + "deinesgleichen 1\n" + "flammt 1\n" + "ergruenden 2\n" + "nacht 1\n" + "scheint 1\n" + "ferne 2\n" + "tragoedie 1\n" + "abgewoehnt 1\n" + "reizt 1\n" + "geistern 1\n" + "nicht 10\n" + "sacht 1\n" + "unbegreiflich 2\n" + "schnelle 
1\n" + "einmal 1\n" + "werd 1\n" + "werke 2\n" + "begraebt 1\n" + "knecht 1\n" + "rings 1\n" + "wird 1\n" + "katze 1\n" + "huete 1\n" + "fortgerissen 1\n" + "gebt 1\n" + "huebsch 1\n" + "hast 1\n" + "irrt 1\n" + "befinde 1\n" + "sind 2\n" + "fuehren 2\n" + "fliegt 1\n" + "ewig 3\n" + "brust 2\n" + "sonne 1\n" + "sprechen 1\n" + "ein 3\n" + "strasse 1\n" + "von 8\n" + "ueberlassen 1\n" + "dir 4\n" + "vom 3\n" + "zu 11\n" + "schwebt 1\n" + "die 22\n" + "vor 2\n" + "wangen 1\n" + "wettgesang 1\n" + "donnerschlags 1\n" + "find 1\n" + "dich 3\n" + "umfass 1\n" + "verboten 1\n" + "laeg 1\n" + "nie 1\n" + "drei 2\n" + "dauern 1\n" + "toren 1\n" + "dauert 1\n" + "verheeren 1\n" + "fliegend 1\n" + "aus 1\n" + "staub 1\n" + "fluessen 1\n" + "haus 1\n" + "auf 5\n" + "dient 2\n" + "tiefer 1\n" + "naeh 1\n" + "zieren 1\n"; public static final String STREAMING_COUNTS_AS_TUPLES = "(machen,1)\n" + "(zeit,1)\n" + "(zeit,2)\n" + "(heerscharen,1)\n" + "(keiner,1)\n" + "(keiner,2)\n" + "(meine,1)\n" + "(meine,2)\n" + "(meine,3)\n" + "(fuehr,1)\n" + "(triumph,1)\n" + "(kommst,1)\n" + "(frei,1)\n" + "(schaffen,1)\n" + "(gesinde,1)\n" + "(langbeinigen,1)\n" + "(schalk,1)\n" + "(besser,1)\n" + "(solang,1)\n" + "(meer,1)\n" + "(meer,2)\n" + "(meer,3)\n" + "(meer,4)\n" + "(fragst,1)\n" + "(gabriel,1)\n" + "(selbst,1)\n" + "(selbst,2)\n" + "(bin,1)\n" + "(sich,1)\n" + "(sich,2)\n" + "(sich,3)\n" + "(sich,4)\n" + "(sich,5)\n" + "(sich,6)\n" + "(sich,7)\n" + "(du,1)\n" + "(du,2)\n" + "(du,3)\n" + "(du,4)\n" + "(du,5)\n" + "(du,6)\n" + "(du,7)\n" + "(du,8)\n" + "(du,9)\n" + "(du,10)\n" + "(du,11)\n" + "(sogar,1)\n" + "(geht,1)\n" + "(immer,1)\n" + "(immer,2)\n" + "(immer,3)\n" + "(immer,4)\n" + "(mensch,1)\n" + "(mensch,2)\n" + "(befestigt,1)\n" + "(lebt,1)\n" + "(lebt,2)\n" + "(mag,1)\n" + "(mag,2)\n" + "(mag,3)\n" + "(engeln,1)\n" + "(engeln,2)\n" + "(breiten,1)\n" + "(blitzendes,1)\n" + "(tags,1)\n" + "(sie,1)\n" + "(sie,2)\n" + "(plagen,1)\n" + "(plagen,2)\n" + "(allzu,1)\n" + 
"(meisten,1)\n" + "(o,1)\n" + "(pfade,1)\n" + "(kennst,1)\n" + "(nichts,1)\n" + "(nichts,2)\n" + "(nichts,3)\n" + "(gedanken,1)\n" + "(befriedigt,1)\n" + "(mich,1)\n" + "(mich,2)\n" + "(mich,3)\n" + "(mich,4)\n" + "(mich,5)\n" + "(mich,6)\n" + "(s,1)\n" + "(s,2)\n" + "(s,3)\n" + "(es,1)\n" + "(es,2)\n" + "(es,3)\n" + "(es,4)\n" + "(es,5)\n" + "(es,6)\n" + "(es,7)\n" + "(es,8)\n" + "(verneinen,1)\n" + "(er,1)\n" + "(er,2)\n" + "(er,3)\n" + "(er,4)\n" + "(er,5)\n" + "(er,6)\n" + "(er,7)\n" + "(er,8)\n" + "(er,9)\n" + "(er,10)\n" + "(er,11)\n" + "(er,12)\n" + "(er,13)\n" + "(gleich,1)\n" + "(baeumchen,1)\n" + "(donnergang,1)\n" + "(wunderlich,1)\n" + "(reise,1)\n" + "(urquell,1)\n" + "(doch,1)\n" + "(doch,2)\n" + "(doch,3)\n" + "(aufs,1)\n" + "(aufs,2)\n" + "(toten,1)\n" + "(niemals,1)\n" + "(eine,1)\n" + "(eine,2)\n" + "(hab,1)\n" + "(darfst,1)\n" + "(da,1)\n" + "(da,2)\n" + "(da,3)\n" + "(da,4)\n" + "(da,5)\n" + "(gen,1)\n" + "(einem,1)\n" + "(einem,2)\n" + "(teil,1)\n" + "(das,1)\n" + "(das,2)\n" + "(das,3)\n" + "(das,4)\n" + "(das,5)\n" + "(das,6)\n" + "(das,7)\n" + "(speise,1)\n" + "(wenig,1)\n" + "(sterne,1)\n" + "(geb,1)\n" + "(welten,1)\n" + "(alle,1)\n" + "(alle,2)\n" + "(alle,3)\n" + "(toent,1)\n" + "(gras,1)\n" + "(felsen,1)\n" + "(kette,1)\n" + "(ich,1)\n" + "(ich,2)\n" + "(ich,3)\n" + "(ich,4)\n" + "(ich,5)\n" + "(ich,6)\n" + "(ich,7)\n" + "(ich,8)\n" + "(ich,9)\n" + "(ich,10)\n" + "(ich,11)\n" + "(ich,12)\n" + "(ich,13)\n" + "(ich,14)\n" + "(fuer,1)\n" + "(fuer,2)\n" + "(als,1)\n" + "(als,2)\n" + "(als,3)\n" + "(mein,1)\n" + "(schoene,1)\n" + "(verzeih,1)\n" + "(schwankender,1)\n" + "(wie,1)\n" + "(wie,2)\n" + "(wie,3)\n" + "(wie,4)\n" + "(wie,5)\n" + "(wie,6)\n" + "(wie,7)\n" + "(wie,8)\n" + "(wie,9)\n" + "(menschlich,1)\n" + "(gaertner,1)\n" + "(taetigkeit,1)\n" + "(bange,1)\n" + "(liebe,1)\n" + "(sei,1)\n" + "(sei,2)\n" + "(seh,1)\n" + "(tollheit,1)\n" + "(am,1)\n" + "(am,2)\n" + "(am,3)\n" + "(am,4)\n" + "(am,5)\n" + "(am,6)\n" + "(michael,1)\n" + 
"(geist,1)\n" + "(ab,1)\n" + "(nahst,1)\n" + "(vollendet,1)\n" + "(liebt,1)\n" + "(brausen,1)\n" + "(nase,1)\n" + "(erlaubt,1)\n" + "(weiss,1)\n" + "(weiss,2)\n" + "(schnellem,1)\n" + "(deinem,1)\n" + "(gleichem,1)\n" + "(gaerung,1)\n" + "(dauernden,1)\n" + "(deines,1)\n" + "(vorgeschriebne,1)\n" + "(irdisch,1)\n" + "(worte,1)\n" + "(verehren,1)\n" + "(hohen,1)\n" + "(hohen,2)\n" + "(weise,1)\n" + "(weise,2)\n" + "(kuenft,1)\n" + "(werdende,1)\n" + "(wette,1)\n" + "(wette,2)\n" + "(wuetend,1)\n" + "(erscheinung,1)\n" + "(gar,1)\n" + "(gar,2)\n" + "(verlieren,1)\n" + "(braucht,1)\n" + "(weiter,1)\n" + "(trank,1)\n" + "(tierischer,1)\n" + "(wohl,1)\n" + "(verteilen,1)\n" + "(verhoehnt,1)\n" + "(schaeumt,1)\n" + "(himmelslichts,1)\n" + "(unbedingte,1)\n" + "(herzlich,1)\n" + "(anblick,1)\n" + "(anblick,2)\n" + "(nennt,1)\n" + "(gruent,1)\n" + "(bluet,1)\n" + "(leichnam,1)\n" + "(erschlaffen,1)\n" + "(jammertagen,1)\n" + "(zieh,1)\n" + "(ihm,1)\n" + "(ihm,2)\n" + "(ihm,3)\n" + "(besondre,1)\n" + "(ihn,1)\n" + "(ihn,2)\n" + "(ihn,3)\n" + "(ihn,4)\n" + "(ihn,5)\n" + "(grossen,1)\n" + "(vollen,1)\n" + "(ihr,1)\n" + "(ihr,2)\n" + "(ihr,3)\n" + "(ihr,4)\n" + "(ihr,5)\n" + "(ihr,6)\n" + "(ihr,7)\n" + "(boten,1)\n" + "(voller,1)\n" + "(singt,1)\n" + "(muhme,1)\n" + "(schon,1)\n" + "(last,1)\n" + "(kleine,1)\n" + "(paradieseshelle,1)\n" + "(nein,1)\n" + "(echten,1)\n" + "(unter,1)\n" + "(bei,1)\n" + "(herr,1)\n" + "(herr,2)\n" + "(herr,3)\n" + "(herr,4)\n" + "(herr,5)\n" + "(herr,6)\n" + "(herr,7)\n" + "(herr,8)\n" + "(herr,9)\n" + "(herr,10)\n" + "(herr,11)\n" + "(gern,1)\n" + "(gern,2)\n" + "(gern,3)\n" + "(sphaerenlauf,1)\n" + "(stets,1)\n" + "(ganze,1)\n" + "(braechte,1)\n" + "(fordert,1)\n" + "(schoensten,1)\n" + "(herrlich,1)\n" + "(herrlich,2)\n" + "(gegeben,1)\n" + "(allein,1)\n" + "(allein,2)\n" + "(reichen,1)\n" + "(schauervoller,1)\n" + "(musst,1)\n" + "(recht,1)\n" + "(bleibt,1)\n" + "(pracht,1)\n" + "(treibt,1)\n" + "(befangen,1)\n" + "(was,1)\n" + "(was,2)\n" + 
"(menschen,1)\n" + "(menschen,2)\n" + "(menschen,3)\n" + "(jede,1)\n" + "(hohe,1)\n" + "(tiefsten,1)\n" + "(bilden,1)\n" + "(drum,1)\n" + "(gibt,1)\n" + "(gibt,2)\n" + "(guter,1)\n" + "(fuerwahr,1)\n" + "(im,1)\n" + "(im,2)\n" + "(im,3)\n" + "(grund,1)\n" + "(in,1)\n" + "(in,2)\n" + "(in,3)\n" + "(in,4)\n" + "(in,5)\n" + "(in,6)\n" + "(in,7)\n" + "(in,8)\n" + "(in,9)\n" + "(hoechste,1)\n" + "(schliesst,1)\n" + "(fels,1)\n" + "(steh,1)\n" + "(euer,1)\n" + "(erster,1)\n" + "(ersten,1)\n" + "(ersten,2)\n" + "(ersten,3)\n" + "(goettersoehne,1)\n" + "(brechen,1)\n" + "(tiefen,1)\n" + "(frucht,1)\n" + "(kreis,1)\n" + "(siehst,1)\n" + "(wege,1)\n" + "(ist,1)\n" + "(ist,2)\n" + "(ist,3)\n" + "(ist,4)\n" + "(ist,5)\n" + "(ist,6)\n" + "(ist,7)\n" + "(ist,8)\n" + "(zikaden,1)\n" + "(frischen,1)\n" + "(ruh,1)\n" + "(deine,1)\n" + "(deine,2)\n" + "(maus,1)\n" + "(brudersphaeren,1)\n" + "(nachher,1)\n" + "(euch,1)\n" + "(euch,2)\n" + "(euch,3)\n" + "(euch,4)\n" + "(gnaden,1)\n" + "(anzuklagen,1)\n" + "(schlange,1)\n" + "(staerke,1)\n" + "(staerke,2)\n" + "(erde,1)\n" + "(erde,2)\n" + "(erde,3)\n" + "(erde,4)\n" + "(verlaub,1)\n" + "(sanfte,1)\n" + "(holden,1)\n" + "(sonst,1)\n" + "(treten,1)\n" + "(sahst,1)\n" + "(alten,1)\n" + "(um,1)\n" + "(wieder,1)\n" + "(alter,1)\n" + "(altes,1)\n" + "(nun,1)\n" + "(lieb,1)\n" + "(gesellen,1)\n" + "(erscheinen,1)\n" + "(wirkt,1)\n" + "(wirkt,2)\n" + "(haettst,1)\n" + "(haettst,2)\n" + "(nur,1)\n" + "(nur,2)\n" + "(nur,3)\n" + "(nur,4)\n" + "(nur,5)\n" + "(nur,6)\n" + "(nur,7)\n" + "(tiefbewegte,1)\n" + "(lachen,1)\n" + "(lachen,2)\n" + "(drange,1)\n" + "(schlag,1)\n" + "(schein,1)\n" + "(muss,1)\n" + "(verworren,1)\n" + "(weges,1)\n" + "(allen,1)\n" + "(gewoehnlich,1)\n" + "(alles,1)\n" + "(halb,1)\n" + "(stuerme,1)\n" + "(springt,1)\n" + "(sollt,1)\n" + "(klarheit,1)\n" + "(so,1)\n" + "(so,2)\n" + "(so,3)\n" + "(so,4)\n" + "(so,5)\n" + "(so,6)\n" + "(erfassen,1)\n" + "(liedchen,1)\n" + "(prolog,1)\n" + "(zur,1)\n" + "(fressen,1)\n" + 
"(zum,1)\n" + "(faust,1)\n" + "(faust,2)\n" + "(erzengel,1)\n" + "(erzengel,2)\n" + "(jahre,1)\n" + "(sonn,1)\n" + "(raphael,1)\n" + "(land,1)\n" + "(land,2)\n" + "(lang,1)\n" + "(gelange,1)\n" + "(lust,1)\n" + "(lust,2)\n" + "(welt,1)\n" + "(sehe,1)\n" + "(ihre,1)\n" + "(jedes,1)\n" + "(erfreut,1)\n" + "(seiner,1)\n" + "(denn,1)\n" + "(wandeln,1)\n" + "(wechselt,1)\n" + "(jeden,1)\n" + "(dort,1)\n" + "(schlecht,1)\n" + "(wenigsten,1)\n" + "(wuerd,1)\n" + "(schranken,1)\n" + "(bewusst,1)\n" + "(bewusst,2)\n" + "(seinem,1)\n" + "(seinem,2)\n" + "(gehasst,1)\n" + "(sein,1)\n" + "(meinem,1)\n" + "(meinen,1)\n" + "(pathos,1)\n" + "(herrn,1)\n" + "(lange,1)\n" + "(lange,2)\n" + "(herab,1)\n" + "(diesen,1)\n" + "(ihren,1)\n" + "(beruehmte,1)\n" + "(goethe,1)\n" + "(tag,1)\n" + "(tag,2)\n" + "(tag,3)\n" + "(tier,1)\n" + "(quark,1)\n" + "(dank,1)\n" + "(seine,1)\n" + "(teufel,1)\n" + "(teufel,2)\n" + "(zweck,1)\n" + "(wenn,1)\n" + "(wenn,2)\n" + "(wenn,3)\n" + "(wenn,4)\n" + "(wenn,5)\n" + "(wenn,6)\n" + "(wenn,7)\n" + "(soll,1)\n" + "(wirkung,1)\n" + "(erlaubnis,1)\n" + "(lebendig,1)\n" + "(uns,1)\n" + "(leicht,1)\n" + "(gewiss,1)\n" + "(schnell,1)\n" + "(und,1)\n" + "(und,2)\n" + "(und,3)\n" + "(und,4)\n" + "(und,5)\n" + "(und,6)\n" + "(und,7)\n" + "(und,8)\n" + "(und,9)\n" + "(und,10)\n" + "(und,11)\n" + "(und,12)\n" + "(und,13)\n" + "(und,14)\n" + "(und,15)\n" + "(und,16)\n" + "(und,17)\n" + "(und,18)\n" + "(und,19)\n" + "(und,20)\n" + "(und,21)\n" + "(und,22)\n" + "(und,23)\n" + "(und,24)\n" + "(und,25)\n" + "(und,26)\n" + "(und,27)\n" + "(und,28)\n" + "(und,29)\n" + "(gerne,1)\n" + "(rechten,1)\n" + "(umher,1)\n" + "(umher,2)\n" + "(vernunft,1)\n" + "(grase,1)\n" + "(nach,1)\n" + "(leben,1)\n" + "(gott,1)\n" + "(der,1)\n" + "(der,2)\n" + "(der,3)\n" + "(der,4)\n" + "(der,5)\n" + "(der,6)\n" + "(der,7)\n" + "(der,8)\n" + "(der,9)\n" + "(der,10)\n" + "(der,11)\n" + "(der,12)\n" + "(der,13)\n" + "(der,14)\n" + "(der,15)\n" + "(der,16)\n" + "(der,17)\n" + "(der,18)\n" + 
"(der,19)\n" + "(der,20)\n" + "(der,21)\n" + "(der,22)\n" + "(der,23)\n" + "(der,24)\n" + "(der,25)\n" + "(der,26)\n" + "(der,27)\n" + "(der,28)\n" + "(der,29)\n" + "(des,1)\n" + "(des,2)\n" + "(des,3)\n" + "(des,4)\n" + "(des,5)\n" + "(doktor,1)\n" + "(beschaemt,1)\n" + "(dreht,1)\n" + "(habe,1)\n" + "(sagen,1)\n" + "(sagen,2)\n" + "(bekennen,1)\n" + "(dunklen,1)\n" + "(wettet,1)\n" + "(den,1)\n" + "(den,2)\n" + "(den,3)\n" + "(den,4)\n" + "(den,5)\n" + "(den,6)\n" + "(den,7)\n" + "(den,8)\n" + "(den,9)\n" + "(mephistopheles,1)\n" + "(mephistopheles,2)\n" + "(mephistopheles,3)\n" + "(mephistopheles,4)\n" + "(mephistopheles,5)\n" + "(mephistopheles,6)\n" + "(mephistopheles,7)\n" + "(mephistopheles,8)\n" + "(mephistopheles,9)\n" + "(dem,1)\n" + "(dem,2)\n" + "(dem,3)\n" + "(dem,4)\n" + "(auch,1)\n" + "(auch,2)\n" + "(auch,3)\n" + "(auch,4)\n" + "(kann,1)\n" + "(kann,2)\n" + "(armen,1)\n" + "(mir,1)\n" + "(mir,2)\n" + "(mir,3)\n" + "(mir,4)\n" + "(mir,5)\n" + "(mir,6)\n" + "(mir,7)\n" + "(mir,8)\n" + "(mir,9)\n" + "(strebt,1)\n" + "(gut,1)\n" + "(gut,2)\n" + "(mit,1)\n" + "(mit,2)\n" + "(mit,3)\n" + "(mit,4)\n" + "(mit,5)\n" + "(mit,6)\n" + "(mit,7)\n" + "(mit,8)\n" + "(mit,9)\n" + "(mit,10)\n" + "(mit,11)\n" + "(bald,1)\n" + "(bald,2)\n" + "(himmlischen,1)\n" + "(himmel,1)\n" + "(himmel,2)\n" + "(himmel,3)\n" + "(noch,1)\n" + "(noch,2)\n" + "(noch,3)\n" + "(kannst,1)\n" + "(deinesgleichen,1)\n" + "(flammt,1)\n" + "(ergruenden,1)\n" + "(ergruenden,2)\n" + "(nacht,1)\n" + "(scheint,1)\n" + "(ferne,1)\n" + "(ferne,2)\n" + "(tragoedie,1)\n" + "(abgewoehnt,1)\n" + "(reizt,1)\n" + "(geistern,1)\n" + "(nicht,1)\n" + "(nicht,2)\n" + "(nicht,3)\n" + "(nicht,4)\n" + "(nicht,5)\n" + "(nicht,6)\n" + "(nicht,7)\n" + "(nicht,8)\n" + "(nicht,9)\n" + "(nicht,10)\n" + "(sacht,1)\n" + "(unbegreiflich,1)\n" + "(unbegreiflich,2)\n" + "(schnelle,1)\n" + "(einmal,1)\n" + "(werd,1)\n" + "(werke,1)\n" + "(werke,2)\n" + "(begraebt,1)\n" + "(knecht,1)\n" + "(rings,1)\n" + "(wird,1)\n" + 
"(katze,1)\n" + "(huete,1)\n" + "(fortgerissen,1)\n" + "(gebt,1)\n" + "(huebsch,1)\n" + "(hast,1)\n" + "(irrt,1)\n" + "(befinde,1)\n" + "(sind,1)\n" + "(sind,2)\n" + "(fuehren,1)\n" + "(fuehren,2)\n" + "(fliegt,1)\n" + "(ewig,1)\n" + "(ewig,2)\n" + "(ewig,3)\n" + "(brust,1)\n" + "(brust,2)\n" + "(sonne,1)\n" + "(sprechen,1)\n" + "(ein,1)\n" + "(ein,2)\n" + "(ein,3)\n" + "(strasse,1)\n" + "(von,1)\n" + "(von,2)\n" + "(von,3)\n" + "(von,4)\n" + "(von,5)\n" + "(von,6)\n" + "(von,7)\n" + "(von,8)\n" + "(ueberlassen,1)\n" + "(dir,1)\n" + "(dir,2)\n" + "(dir,3)\n" + "(dir,4)\n" + "(vom,1)\n" + "(vom,2)\n" + "(vom,3)\n" + "(zu,1)\n" + "(zu,2)\n" + "(zu,3)\n" + "(zu,4)\n" + "(zu,5)\n" + "(zu,6)\n" + "(zu,7)\n" + "(zu,8)\n" + "(zu,9)\n" + "(zu,10)\n" + "(zu,11)\n" + "(schwebt,1)\n" + "(die,1)\n" + "(die,2)\n" + "(die,3)\n" + "(die,4)\n" + "(die,5)\n" + "(die,6)\n" + "(die,7)\n" + "(die,8)\n" + "(die,9)\n" + "(die,10)\n" + "(die,11)\n" + "(die,12)\n" + "(die,13)\n" + "(die,14)\n" + "(die,15)\n" + "(die,16)\n" + "(die,17)\n" + "(die,18)\n" + "(die,19)\n" + "(die,20)\n" + "(die,21)\n" + "(die,22)\n" + "(vor,1)\n" + "(vor,2)\n" + "(wangen,1)\n" + "(wettgesang,1)\n" + "(donnerschlags,1)\n" + "(find,1)\n" + "(dich,1)\n" + "(dich,2)\n" + "(dich,3)\n" + "(umfass,1)\n" + "(verboten,1)\n" + "(laeg,1)\n" + "(nie,1)\n" + "(drei,1)\n" + "(drei,2)\n" + "(dauern,1)\n" + "(toren,1)\n" + "(dauert,1)\n" + "(verheeren,1)\n" + "(fliegend,1)\n" + "(aus,1)\n" + "(staub,1)\n" + "(fluessen,1)\n" + "(haus,1)\n" + "(auf,1)\n" + "(auf,2)\n" + "(auf,3)\n" + "(auf,4)\n" + "(auf,5)\n" + "(dient,1)\n" + "(dient,2)\n" + "(tiefer,1)\n" + "(naeh,1)\n" + "(zieren,1)\n"; public static final String COUNTS_AS_TUPLES = "(machen,1)\n" + "(zeit,2)\n" + "(heerscharen,1)\n" + "(keiner,2)\n" + "(meine,3)\n" + "(fuehr,1)\n" + "(triumph,1)\n" + "(kommst,1)\n" + "(frei,1)\n" + "(schaffen,1)\n" + "(gesinde,1)\n" + "(langbeinigen,1)\n" + "(schalk,1)\n" + "(besser,1)\n" + "(solang,1)\n" + "(meer,4)\n" + "(fragst,1)\n" + 
"(gabriel,1)\n" + "(selbst,2)\n" + "(bin,1)\n" + "(sich,7)\n" + "(du,11)\n" + "(sogar,1)\n" + "(geht,1)\n" + "(immer,4)\n" + "(mensch,2)\n" + "(befestigt,1)\n" + "(lebt,2)\n" + "(mag,3)\n" + "(engeln,2)\n" + "(breiten,1)\n" + "(blitzendes,1)\n" + "(tags,1)\n" + "(sie,2)\n" + "(plagen,2)\n" + "(allzu,1)\n" + "(meisten,1)\n" + "(o,1)\n" + "(pfade,1)\n" + "(kennst,1)\n" + "(nichts,3)\n" + "(gedanken,1)\n" + "(befriedigt,1)\n" + "(mich,6)\n" + "(s,3)\n" + "(es,8)\n" + "(verneinen,1)\n" + "(er,13)\n" + "(gleich,1)\n" + "(baeumchen,1)\n" + "(donnergang,1)\n" + "(wunderlich,1)\n" + "(reise,1)\n" + "(urquell,1)\n" + "(doch,3)\n" + "(aufs,2)\n" + "(toten,1)\n" + "(niemals,1)\n" + "(eine,2)\n" + "(hab,1)\n" + "(darfst,1)\n" + "(da,5)\n" + "(gen,1)\n" + "(einem,2)\n" + "(teil,1)\n" + "(das,7)\n" + "(speise,1)\n" + "(wenig,1)\n" + "(sterne,1)\n" + "(geb,1)\n" + "(welten,1)\n" + "(alle,3)\n" + "(toent,1)\n" + "(gras,1)\n" + "(felsen,1)\n" + "(kette,1)\n" + "(ich,14)\n" + "(fuer,2)\n" + "(als,3)\n" + "(mein,1)\n" + "(schoene,1)\n" + "(verzeih,1)\n" + "(schwankender,1)\n" + "(wie,9)\n" + "(menschlich,1)\n" + "(gaertner,1)\n" + "(taetigkeit,1)\n" + "(bange,1)\n" + "(liebe,1)\n" + "(sei,2)\n" + "(seh,1)\n" + "(tollheit,1)\n" + "(am,6)\n" + "(michael,1)\n" + "(geist,1)\n" + "(ab,1)\n" + "(nahst,1)\n" + "(vollendet,1)\n" + "(liebt,1)\n" + "(brausen,1)\n" + "(nase,1)\n" + "(erlaubt,1)\n" + "(weiss,2)\n" + "(schnellem,1)\n" + "(deinem,1)\n" + "(gleichem,1)\n" + "(gaerung,1)\n" + "(dauernden,1)\n" + "(deines,1)\n" + "(vorgeschriebne,1)\n" + "(irdisch,1)\n" + "(worte,1)\n" + "(verehren,1)\n" + "(hohen,2)\n" + "(weise,2)\n" + "(kuenft,1)\n" + "(werdende,1)\n" + "(wette,2)\n" + "(wuetend,1)\n" + "(erscheinung,1)\n" + "(gar,2)\n" + "(verlieren,1)\n" + "(braucht,1)\n" + "(weiter,1)\n" + "(trank,1)\n" + "(tierischer,1)\n" + "(wohl,1)\n" + "(verteilen,1)\n" + "(verhoehnt,1)\n" + "(schaeumt,1)\n" + "(himmelslichts,1)\n" + "(unbedingte,1)\n" + "(herzlich,1)\n" + "(anblick,2)\n" + "(nennt,1)\n" + 
"(gruent,1)\n" + "(bluet,1)\n" + "(leichnam,1)\n" + "(erschlaffen,1)\n" + "(jammertagen,1)\n" + "(zieh,1)\n" + "(ihm,3)\n" + "(besondre,1)\n" + "(ihn,5)\n" + "(grossen,1)\n" + "(vollen,1)\n" + "(ihr,7)\n" + "(boten,1)\n" + "(voller,1)\n" + "(singt,1)\n" + "(muhme,1)\n" + "(schon,1)\n" + "(last,1)\n" + "(kleine,1)\n" + "(paradieseshelle,1)\n" + "(nein,1)\n" + "(echten,1)\n" + "(unter,1)\n" + "(bei,1)\n" + "(herr,11)\n" + "(gern,3)\n" + "(sphaerenlauf,1)\n" + "(stets,1)\n" + "(ganze,1)\n" + "(braechte,1)\n" + "(fordert,1)\n" + "(schoensten,1)\n" + "(herrlich,2)\n" + "(gegeben,1)\n" + "(allein,2)\n" + "(reichen,1)\n" + "(schauervoller,1)\n" + "(musst,1)\n" + "(recht,1)\n" + "(bleibt,1)\n" + "(pracht,1)\n" + "(treibt,1)\n" + "(befangen,1)\n" + "(was,2)\n" + "(menschen,3)\n" + "(jede,1)\n" + "(hohe,1)\n" + "(tiefsten,1)\n" + "(bilden,1)\n" + "(drum,1)\n" + "(gibt,2)\n" + "(guter,1)\n" + "(fuerwahr,1)\n" + "(im,3)\n" + "(grund,1)\n" + "(in,9)\n" + "(hoechste,1)\n" + "(schliesst,1)\n" + "(fels,1)\n" + "(steh,1)\n" + "(euer,1)\n" + "(erster,1)\n" + "(ersten,3)\n" + "(goettersoehne,1)\n" + "(brechen,1)\n" + "(tiefen,1)\n" + "(frucht,1)\n" + "(kreis,1)\n" + "(siehst,1)\n" + "(wege,1)\n" + "(ist,8)\n" + "(zikaden,1)\n" + "(frischen,1)\n" + "(ruh,1)\n" + "(deine,2)\n" + "(maus,1)\n" + "(brudersphaeren,1)\n" + "(nachher,1)\n" + "(euch,4)\n" + "(gnaden,1)\n" + "(anzuklagen,1)\n" + "(schlange,1)\n" + "(staerke,2)\n" + "(erde,4)\n" + "(verlaub,1)\n" + "(sanfte,1)\n" + "(holden,1)\n" + "(sonst,1)\n" + "(treten,1)\n" + "(sahst,1)\n" + "(alten,1)\n" + "(um,1)\n" + "(wieder,1)\n" + "(alter,1)\n" + "(altes,1)\n" + "(nun,1)\n" + "(lieb,1)\n" + "(gesellen,1)\n" + "(erscheinen,1)\n" + "(wirkt,2)\n" + "(haettst,2)\n" + "(nur,7)\n" + "(tiefbewegte,1)\n" + "(lachen,2)\n" + "(drange,1)\n" + "(schlag,1)\n" + "(schein,1)\n" + "(muss,1)\n" + "(verworren,1)\n" + "(weges,1)\n" + "(allen,1)\n" + "(gewoehnlich,1)\n" + "(alles,1)\n" + "(halb,1)\n" + "(stuerme,1)\n" + "(springt,1)\n" + "(sollt,1)\n" + 
"(klarheit,1)\n" + "(so,6)\n" + "(erfassen,1)\n" + "(liedchen,1)\n" + "(prolog,1)\n" + "(zur,1)\n" + "(fressen,1)\n" + "(zum,1)\n" + "(faust,2)\n" + "(erzengel,2)\n" + "(jahre,1)\n" + "(sonn,1)\n" + "(raphael,1)\n" + "(land,2)\n" + "(lang,1)\n" + "(gelange,1)\n" + "(lust,2)\n" + "(welt,1)\n" + "(sehe,1)\n" + "(ihre,1)\n" + "(jedes,1)\n" + "(erfreut,1)\n" + "(seiner,1)\n" + "(denn,1)\n" + "(wandeln,1)\n" + "(wechselt,1)\n" + "(jeden,1)\n" + "(dort,1)\n" + "(schlecht,1)\n" + "(wenigsten,1)\n" + "(wuerd,1)\n" + "(schranken,1)\n" + "(bewusst,2)\n" + "(seinem,2)\n" + "(gehasst,1)\n" + "(sein,1)\n" + "(meinem,1)\n" + "(meinen,1)\n" + "(pathos,1)\n" + "(herrn,1)\n" + "(lange,2)\n" + "(herab,1)\n" + "(diesen,1)\n" + "(ihren,1)\n" + "(beruehmte,1)\n" + "(goethe,1)\n" + "(tag,3)\n" + "(tier,1)\n" + "(quark,1)\n" + "(dank,1)\n" + "(seine,1)\n" + "(teufel,2)\n" + "(zweck,1)\n" + "(wenn,7)\n" + "(soll,1)\n" + "(wirkung,1)\n" + "(erlaubnis,1)\n" + "(lebendig,1)\n" + "(uns,1)\n" + "(leicht,1)\n" + "(gewiss,1)\n" + "(schnell,1)\n" + "(und,29)\n" + "(gerne,1)\n" + "(rechten,1)\n" + "(umher,2)\n" + "(vernunft,1)\n" + "(grase,1)\n" + "(nach,1)\n" + "(leben,1)\n" + "(gott,1)\n" + "(der,29)\n" + "(des,5)\n" + "(doktor,1)\n" + "(beschaemt,1)\n" + "(dreht,1)\n" + "(habe,1)\n" + "(sagen,2)\n" + "(bekennen,1)\n" + "(dunklen,1)\n" + "(wettet,1)\n" + "(den,9)\n" + "(mephistopheles,9)\n" + "(dem,4)\n" + "(auch,4)\n" + "(kann,2)\n" + "(armen,1)\n" + "(mir,9)\n" + "(strebt,1)\n" + "(gut,2)\n" + "(mit,11)\n" + "(bald,2)\n" + "(himmlischen,1)\n" + "(himmel,3)\n" + "(noch,3)\n" + "(kannst,1)\n" + "(deinesgleichen,1)\n" + "(flammt,1)\n" + "(ergruenden,2)\n" + "(nacht,1)\n" + "(scheint,1)\n" + "(ferne,2)\n" + "(tragoedie,1)\n" + "(abgewoehnt,1)\n" + "(reizt,1)\n" + "(geistern,1)\n" + "(nicht,10)\n" + "(sacht,1)\n" + "(unbegreiflich,2)\n" + "(schnelle,1)\n" + "(einmal,1)\n" + "(werd,1)\n" + "(werke,2)\n" + "(begraebt,1)\n" + "(knecht,1)\n" + "(rings,1)\n" + "(wird,1)\n" + "(katze,1)\n" + 
"(huete,1)\n" + "(fortgerissen,1)\n" + "(gebt,1)\n" + "(huebsch,1)\n" + "(hast,1)\n" + "(irrt,1)\n" + "(befinde,1)\n" + "(sind,2)\n" + "(fuehren,2)\n" + "(fliegt,1)\n" + "(ewig,3)\n" + "(brust,2)\n" + "(sonne,1)\n" + "(sprechen,1)\n" + "(ein,3)\n" + "(strasse,1)\n" + "(von,8)\n" + "(ueberlassen,1)\n" + "(dir,4)\n" + "(vom,3)\n" + "(zu,11)\n" + "(schwebt,1)\n" + "(die,22)\n" + "(vor,2)\n" + "(wangen,1)\n" + "(wettgesang,1)\n" + "(donnerschlags,1)\n" + "(find,1)\n" + "(dich,3)\n" + "(umfass,1)\n" + "(verboten,1)\n" + "(laeg,1)\n" + "(nie,1)\n" + "(drei,2)\n" + "(dauern,1)\n" + "(toren,1)\n" + "(dauert,1)\n" + "(verheeren,1)\n" + "(fliegend,1)\n" + "(aus,1)\n" + "(staub,1)\n" + "(fluessen,1)\n" + "(haus,1)\n" + "(auf,5)\n" + "(dient,2)\n" + "(tiefer,1)\n" + "(naeh,1)\n" + "(zieren,1)\n"; private WordCountData() {} }
package dev.kkorolyov.simplestructs;

import dev.kkorolyov.simplestructs.Graph.Node;

import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.StreamSupport;

import static dev.kkorolyov.simplefuncs.stream.Iterables.append;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableCollection;
import static java.util.stream.Collectors.toMap;
import static java.util.stream.Collectors.toSet;

/**
 * A collection of values connected by weighted outbound and inbound edges to other values.
 * @param <T> value type
 * @param <E> edge type
 */
public final class Graph<T, E> implements Iterable<Node<T, E>> {
	// Backing store mapping each value to its node; nodes are created lazily by computeIfAbsent
	private final Map<T, Node<T, E>> nodes = new HashMap<>();

	/**
	 * @param value value to check
	 * @return whether this graph contains {@code value}
	 */
	public boolean contains(T value) {
		return get(value) != null;
	}

	/**
	 * @param value value to get node for
	 * @return node with {@code value} in this graph, if any, else {@code null}
	 */
	public Node<T, E> get(T value) {
		return nodes.get(value);
	}

	/**
	 * @param value value to get outbound connected values for
	 * @return values connected to {@code value} by outbound edges
	 */
	public Collection<T> getOutbounds(T value) {
		return getEdgeValues(value, Node::getOutbounds);
	}
	/**
	 * @param value value to get inbound connected values for
	 * @return values connected to {@code value} by inbound edges
	 */
	public Collection<T> getInbounds(T value) {
		return getEdgeValues(value, Node::getInbounds);
	}
	// Maps a value's node through edgesMapper and unwraps the connected nodes to their values
	private Collection<T> getEdgeValues(T value, Function<? super Node<T, E>, ? extends Collection<Node<T, E>>> edgesMapper) {
		Node<T, E> node = get(value);
		return node == null
				? emptySet()
				: edgesMapper.apply(node).stream()
						.map(Node::getValue)
						.collect(toSet());
	}

	/**
	 * @param value value to get outbound degree for
	 * @return number of outbound edges from {@code value}; {@code 0} if not in this graph
	 */
	public int outDegree(T value) {
		Node<T, E> node = get(value);
		return node == null
				? 0
				: node.outDegree();
	}
	/**
	 * @param value value to get inbound degree for
	 * @return number of inbound edges to {@code value}; {@code 0} if not in this graph
	 */
	public int inDegree(T value) {
		Node<T, E> node = get(value);
		return node == null
				? 0
				: node.inDegree();
	}

	/**
	 * @param value value to check for connectivity
	 * @return whether {@code value} is in this graph and connected to at least one other value
	 */
	public boolean isConnected(T value) {
		Node<T, E> node = get(value);
		return node != null && node.isConnected();
	}

	/** @see #add(Object, Map) */
	public Graph<T, E> add(T value, T outbound, E edge) {
		return add(value, singletonMap(outbound, edge));
	}
	/**
	 * Adds or updates a value in this graph.
	 * @param value value to add or update
	 * @param outbounds values and their respective edges to add as outbound connections from {@code value}
	 * @return {@code this}
	 */
	public Graph<T, E> add(T value, Map<? extends T, ? extends E> outbounds) {
		computeIfAbsent(value)
				.addEdges(outbounds.entrySet().stream()
						.collect(toMap(
								entry -> computeIfAbsent(entry.getKey()),
								Entry::getValue
						))
				);
		return this;
	}

	/** @see #add(Object, Iterable) */
	@SafeVarargs
	public final Graph<T, E> add(T value, T... outbounds) {
		return add(value, Arrays.asList(outbounds));
	}
	/**
	 * Adds or updates a value in this graph.
	 * @param value value to add or update
	 * @param outbounds values to add as outbound connections from {@code value}
	 * @return {@code this}
	 */
	public Graph<T, E> add(T value, Iterable<T> outbounds) {
		computeIfAbsent(value)
				.addEdges(computeIfAbsent(outbounds));
		return this;
	}

	/** @see #addUndirected(Object, Map) */
	public Graph<T, E> addUndirected(T value, T connected, E edge) {
		return addUndirected(value, singletonMap(connected, edge));
	}
	/**
	 * Like {@link #add(Object, Map)}, but also adds an inverse edge between connected node pairs.
	 */
	public Graph<T, E> addUndirected(T value, Map<? extends T, ? extends E> connecteds) {
		computeIfAbsent(value)
				.addEdgesUndirected(connecteds.entrySet().stream()
						.collect(toMap(
								entry -> computeIfAbsent(entry.getKey()),
								Entry::getValue
						))
				);
		return this;
	}

	/** @see #addUndirected(Object, Iterable) */
	@SafeVarargs
	public final Graph<T, E> addUndirected(T value, T connected, T... connecteds) {
		return addUndirected(value, append(singleton(connected), connecteds));
	}
	/**
	 * Like {@link #add(Object, Iterable)}, but also adds an inverse edge between connected node pairs.
	 */
	public Graph<T, E> addUndirected(T value, Iterable<T> connecteds) {
		computeIfAbsent(value)
				.addEdgesUndirected(computeIfAbsent(connecteds));
		return this;
	}

	/** @see #remove(Iterable) */
	@SafeVarargs
	public final Graph<T, E> remove(T value, T... values) {
		return remove(append(singleton(value), values));
	}
	/**
	 * Removes {@code values} from this graph.
	 * @param values values to remove
	 * @return {@code this}
	 */
	public Graph<T, E> remove(Iterable<T> values) {
		find(values)
				.forEach(Node::destroy);
		return this;
	}

	/** @see #sever(Object, Iterable) */
	@SafeVarargs
	public final Graph<T, E> sever(T value, T outbound, T... outbounds) {
		return sever(value, append(singleton(outbound), outbounds));
	}
	/**
	 * Removes outbound edges from a value in this graph.
	 * @param value value to remove outbound edges for
	 * @param outbounds connected values to remove outbound edges from {@code value} for
	 * @return {@code this}
	 */
	public Graph<T, E> sever(T value, Iterable<T> outbounds) {
		find(value)
				.ifPresent(node -> node.removeEdges(find(outbounds)));
		return this;
	}

	/** @see #severUndirected(Object, Iterable) */
	// FIX: declared final with @SafeVarargs, matching the other generic-varargs overloads
	// (add, addUndirected, remove, sever) and avoiding unchecked-generic-array warnings at call sites
	@SafeVarargs
	public final Graph<T, E> severUndirected(T value, T connected, T... connecteds) {
		return severUndirected(value, append(singleton(connected), connecteds));
	}
	/**
	 * Like {@link #sever(Object, Iterable)}, but also removes the inverse edge between connected node pairs.
	 */
	public Graph<T, E> severUndirected(T value, Iterable<T> connecteds) {
		find(value)
				.ifPresent(node -> node.removeEdgesUndirected(find(connecteds)));
		return this;
	}

	// Returns the node for value, creating and registering it if absent
	private Node<T, E> computeIfAbsent(T value) {
		return nodes.computeIfAbsent(value, k -> new Node<>(k, this));
	}
	// Lazily maps each value to its (created-if-absent) node
	private Iterable<Node<T, E>> computeIfAbsent(Iterable<T> values) {
		return StreamSupport.stream(values.spliterator(), false)
				.map(this::computeIfAbsent)
				::iterator;
	}

	// Returns the node for value, if present
	private Optional<Node<T, E>> find(T value) {
		return Optional.ofNullable(nodes.get(value));
	}
	// Lazily maps each value to its existing node, skipping values not in this graph
	private Iterable<Node<T, E>> find(Iterable<T> values) {
		return StreamSupport.stream(values.spliterator(), false)
				.map(nodes::get)
				.filter(Objects::nonNull)
				::iterator;
	}

	/** @return view over all nodes in this graph */
	public Collection<Node<T, E>> getNodes() {
		return unmodifiableCollection(nodes.values());
	}
	/** @return view over all values in this graph */
	public Collection<T> getValues() {
		return unmodifiableCollection(nodes.keySet());
	}

	/**
	 * Removes all values in this graph.
	 */
	public void clear() {
		nodes.clear();
	}

	/** @return iterator over all nodes in this graph */
	@Override
	public Iterator<Node<T, E>> iterator() {
		return nodes.values().iterator();
	}

	/**
	 * An individual vertex with outbound and inbound edges in a {@link Graph}.
	 * @param <T> value type
	 * @param <E> edge type
	 */
	public static final class Node<T, E> {
		private final T value;
		// Adjacency maps keyed by connected node; values carry the node plus edge metadata
		private final Map<Node<T, E>, RelatedNode<T, E>> outbounds = new HashMap<>();
		private final Map<Node<T, E>, RelatedNode<T, E>> inbounds = new HashMap<>();
		// Owning graph; needed by destroy() to unregister this node
		private final Graph<T, E> graph;

		private Node(T value, Graph<T, E> graph) {
			this.value = value;
			this.graph = graph;
		}

		/**
		 * Adds outbound edges from this node to each node in {@code outbounds} and inbound edges from each node in {@code outbounds} to this node.
		 * @param outbounds outbound nodes with connection metadata to connect to this node
		 */
		private void addEdges(Map<Node<T, E>, ? extends E> outbounds) {
			outbounds.forEach((outbound, edge) -> {
				this.outbounds.put(outbound, new RelatedNode<>(outbound, edge));
				outbound.inbounds.put(this, new RelatedNode<>(this, edge));
			});
		}
		/**
		 * Adds outbound edges from this node to each node in {@code outbounds} and inbound edges from each node in {@code outbounds} to this node.
		 * @param outbounds outbound nodes to connect to this node with {@code null} connection metadata
		 */
		private void addEdges(Iterable<Node<T, E>> outbounds) {
			for (Node<T, E> outbound : outbounds) {
				this.outbounds.put(outbound, new RelatedNode<>(outbound, null));
				outbound.inbounds.put(this, new RelatedNode<>(this, null));
			}
		}

		/**
		 * Removes outbound edges from this node to each node in {@code outbounds} and inbound edges from each node in {@code outbounds} to this node.
		 * @param outbounds outbound nodes to disconnect from this node
		 */
		private void removeEdges(Iterable<Node<T, E>> outbounds) {
			for (Node<T, E> outbound : outbounds) {
				this.outbounds.remove(outbound);
				outbound.inbounds.remove(this);
			}
		}

		/**
		 * Adds 2-way edge pairs from this node to each node in {@code connecteds}.
		 * @param connecteds other nodes with connection metadata to connect to this node in both directions
		 */
		private void addEdgesUndirected(Map<Node<T, E>, ? extends E> connecteds) {
			connecteds.forEach((connected, edge) -> {
				RelatedNode<T, E> connectedRelated = new RelatedNode<>(connected, edge);
				RelatedNode<T, E> thisRelated = new RelatedNode<>(this, edge);

				outbounds.put(connected, connectedRelated);
				inbounds.put(connected, connectedRelated);

				connected.outbounds.put(this, thisRelated);
				connected.inbounds.put(this, thisRelated);
			});
		}
		/**
		 * Adds 2-way edge pairs from this node to each node in {@code connecteds}.
		 * @param connecteds other nodes to connect to this node with {@code null} connection metadata in both directions
		 */
		private void addEdgesUndirected(Iterable<Node<T, E>> connecteds) {
			for (Node<T, E> connected : connecteds) {
				RelatedNode<T, E> connectedRelated = new RelatedNode<>(connected, null);
				RelatedNode<T, E> thisRelated = new RelatedNode<>(this, null);

				outbounds.put(connected, connectedRelated);
				inbounds.put(connected, connectedRelated);

				connected.outbounds.put(this, thisRelated);
				connected.inbounds.put(this, thisRelated);
			}
		}

		/**
		 * Removes 2-way edge pairs from this node to each node in {@code connecteds}.
		 * @param connecteds other nodes to disconnect from this node in both directions
		 */
		private void removeEdgesUndirected(Iterable<Node<T, E>> connecteds) {
			for (Node<T, E> connected : connecteds) {
				outbounds.remove(connected);
				inbounds.remove(connected);

				connected.outbounds.remove(this);
				connected.inbounds.remove(this);
			}
		}

		/**
		 * Removes this node and all connections to it from the graph.
		 */
		private void destroy() {
			for (Node<T, E> outbound : outbounds.keySet()) {
				outbound.inbounds.remove(this);
			}
			for (Node<T, E> inbound : inbounds.keySet()) {
				inbound.outbounds.remove(this);
			}
			outbounds.clear();
			inbounds.clear();

			graph.nodes.remove(value);
		}

		/** @return all nodes connected by an outbound edge from this node */
		public Collection<Node<T, E>> getOutbounds() {
			return unmodifiableCollection(outbounds.keySet());
		}
		/** @return all nodes connected by an outbound edge from this node, along with connection metadata */
		public Collection<RelatedNode<T, E>> getOutboundRelations() {
			return unmodifiableCollection(outbounds.values());
		}

		/** @return all nodes connected by an inbound edge to this node */
		public Collection<Node<T, E>> getInbounds() {
			return unmodifiableCollection(inbounds.keySet());
		}
		/** @return all nodes connected by an inbound edge to this node, along with connection metadata */
		public Collection<RelatedNode<T, E>> getInboundRelations() {
			return unmodifiableCollection(inbounds.values());
		}

		/** @return number of outbound edges from this node */
		public int outDegree() {
			return outbounds.size();
		}
		/** @return number of inbound edges to this node */
		public int inDegree() {
			return inbounds.size();
		}

		/** @return whether this node has an outbound or inbound edge to at least 1 other node */
		public boolean isConnected() {
			return outDegree() > 0 || inDegree() > 0;
		}

		/** @return node value */
		public T getValue() {
			return value;
		}

		/**
		 * A connected {@link Node} along with additional connection metadata.
		 * @param <T> value type
		 * @param <E> edge type
		 */
		public static final class RelatedNode<T, E> {
			private final Node<T, E> node;
			private final E edge;

			private RelatedNode(Node<T, E> node, E edge) {
				this.node = node;
				this.edge = edge;
			}

			/** @return connected node */
			public Node<T, E> getNode() {
				return node;
			}
			/** @return connection metadata */
			public E getEdge() {
				return edge;
			}
		}
	}
}
package edu.harvard.iq.dataverse;

import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import java.sql.Timestamp;
import java.util.List;
import java.util.Objects;
import javax.persistence.*;

/**
 * Base of the object hierarchy for "anything that can be inside a dataverse".
 *
 * @author michael
 */
@NamedQueries({
    @NamedQuery(name = "DvObject.findAll",
            query = "SELECT o FROM DvObject o ORDER BY o.id"),
    @NamedQuery(name = "DvObject.findById",
            query = "SELECT o FROM DvObject o WHERE o.id=:id"),
    @NamedQuery(name = "DvObject.ownedObjectsById",
			query="SELECT COUNT(obj) FROM DvObject obj WHERE obj.owner.id=:id")
})
@Entity
// Inheritance strategy "JOINED" will create 4 db tables -
// the top-level dvobject, with the common columns, and the 3 child classes -
// dataverse, dataset and datafile. The ids from the main table will be reused
// in the child tables. (i.e., the id sequences will be "sparse" in the 3
// child tables). Tested, appears to be working properly. -- L.A. Nov. 4 2014
@Inheritance(strategy=InheritanceType.JOINED)
@Table(indexes = {@Index(columnList="dtype")
		, @Index(columnList="owner_id")
		, @Index(columnList="creator_id")
		, @Index(columnList="releaseuser_id")})
public abstract class DvObject implements java.io.Serializable {

    /** Visitor that renders a human-readable name for each concrete subtype. */
    public static final Visitor<String> NamePrinter = new Visitor<String>(){

        @Override
        public String visit(Dataverse dv) {
            return dv.getName();
        }

        @Override
        public String visit(Dataset ds) {
            return ds.getLatestVersion().getTitle();
        }

        @Override
        public String visit(DataFile df) {
            return df.getFileMetadata().getLabel();
        }
    };

    /** Visitor that renders "[id name]" for each concrete subtype. */
    public static final Visitor<String> NameIdPrinter = new Visitor<String>(){

        @Override
        public String visit(Dataverse dv) {
            return "[" + dv.getId() + " " + dv.getName() + "]";
        }

        @Override
        public String visit(Dataset ds) {
            return "[" + ds.getId() + " " + ds.getLatestVersion().getTitle() + "]";
        }

        @Override
        public String visit(DataFile df) {
            return "[" + df.getId() + " " + df.getFileMetadata().getLabel() + "]";
        }
    };

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @ManyToOne
    private DvObject owner;

    private Timestamp publicationDate;

    /** The user that released this dataverse */
    @ManyToOne
    private AuthenticatedUser releaseUser;

    @Column( nullable = false )
    private Timestamp createDate;

    @Column(nullable = false)
    private Timestamp modificationTime;

    /**
     * @todo Rename this to contentIndexTime (or something) to differentiate it
     * from permissionIndexTime. Content Solr docs vs. permission Solr docs.
     */
    private Timestamp indexTime;

    /**
     * @todo Make this nullable=true. Currently we can't because the
     * CreateDataverseCommand saves the dataverse before it assigns a role.
     */
    @Column(nullable = true)
    private Timestamp permissionModificationTime;

    private Timestamp permissionIndexTime;

    public Timestamp getModificationTime() {
        return modificationTime;
    }

    /**
     * modificationTime is used for comparison with indexTime so we know if the
     * Solr index is stale.
     * @param modificationTime
     */
    public void setModificationTime(Timestamp modificationTime) {
        this.modificationTime = modificationTime;
    }

    public Timestamp getIndexTime() {
        return indexTime;
    }

    /**
     * indexTime is used for comparison with modificationTime so we know if the
     * Solr index is stale.
     * @param indexTime
     */
    public void setIndexTime(Timestamp indexTime) {
        this.indexTime = indexTime;
    }

    @ManyToOne
    private AuthenticatedUser creator;

    /** Double-dispatch hook over the three concrete DvObject subtypes. */
    public interface Visitor<T> {
        public T visit(Dataverse dv);
        public T visit(Dataset ds);
        public T visit(DataFile df);
    }

    /**
     * Sets the owner of the object. This is {@code protected} rather than
     * {@code public}, since different sub-classes have different possible owner
     * types: a {@link DataFile} can only have a {@link Dataset}, for example.
     *
     * @param newOwner
     */
    protected void setOwner(DvObjectContainer newOwner) {
        owner = newOwner;
    }

    public DvObjectContainer getOwner() {
        return (DvObjectContainer)owner;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    /**
     * @return Whether {@code this} takes no permissions from roles assigned on its parents.
     */
    public abstract boolean isEffectivelyPermissionRoot();

    public Timestamp getPublicationDate() {
        return publicationDate;
    }

    public void setPublicationDate(Timestamp publicationDate) {
        this.publicationDate = publicationDate;
    }

    public AuthenticatedUser getReleaseUser() {
        return releaseUser;
    }

    public void setReleaseUser(AuthenticatedUser releaseUser) {
        this.releaseUser = releaseUser;
    }

    // An object counts as released once a publication date has been set.
    public boolean isReleased() {
        return publicationDate != null;
    }

    public Timestamp getCreateDate() {
        return createDate;
    }

    public void setCreateDate(Timestamp createDate) {
        this.createDate = createDate;
    }

    public AuthenticatedUser getCreator() {
        return creator;
    }

    public void setCreator(AuthenticatedUser creator) {
        this.creator = creator;
    }

    public abstract <T> T accept(Visitor<T> v);

    // NOTE: hashCode is based on the (mutable, db-assigned) id; subclasses must
    // implement equals consistently with this.
    @Override
    public int hashCode() {
        return Objects.hash(getId());
    }

    @Override
    public abstract boolean equals(Object o);

    @Override
    public String toString() {
        // FIX: Java-style array declaration (was C-style "String classNameComps[]").
        // Uses only the last dot-separated component of the class name.
        String[] classNameComps = getClass().getName().split("\\.");
        return String.format("[%s id:%d %s]", classNameComps[classNameComps.length - 1],
                getId(), toStringExtras());
    }

    /**
     * Convenience method to add data to the default toString output.
     *
     * @return
     */
    protected String toStringExtras() {
        return "";
    }

    public abstract String getDisplayName();

    // helper method used to mimic instanceof on JSF pge
    public boolean isInstanceofDataverse() {
        return this instanceof Dataverse;
    }

    public boolean isInstanceofDataset() {
        return this instanceof Dataset;
    }

    public boolean isInstanceofDataFile() {
        return this instanceof DataFile;
    }

    public Timestamp getPermissionModificationTime() {
        return permissionModificationTime;
    }

    public void setPermissionModificationTime(Timestamp permissionModificationTime) {
        this.permissionModificationTime = permissionModificationTime;
    }

    public Timestamp getPermissionIndexTime() {
        return permissionIndexTime;
    }

    public void setPermissionIndexTime(Timestamp permissionIndexTime) {
        this.permissionIndexTime = permissionIndexTime;
    }

    // Walks up the ownership chain until the enclosing Dataverse is found;
    // null only if this object is detached from any dataverse.
    public Dataverse getDataverseContext() {
        if (this instanceof Dataverse) {
            return (Dataverse) this;
        } else if (this.getOwner() != null){
            return this.getOwner().getDataverseContext();
        }

        return null;
    }

    @OneToMany(mappedBy = "definitionPoint",cascade={ CascadeType.REMOVE, CascadeType.MERGE,CascadeType.PERSIST}, orphanRemoval=true)
    List<RoleAssignment> roleAssignments;
}
/**
 * Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.broker.routing.builder;

import com.linkedin.pinot.common.config.TableConfig;
import com.linkedin.pinot.common.utils.CommonConstants;
import com.linkedin.pinot.common.utils.LLCSegmentName;
import com.linkedin.pinot.common.utils.SegmentName;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import org.apache.commons.configuration.BaseConfiguration;
import org.apache.helix.model.ExternalView;
import org.apache.helix.model.InstanceConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;

import static org.testng.Assert.*;


/**
 * Test for the Kafka low level consumer routing table builder.
 */
public class LowLevelConsumerRoutingTableBuilderTest {
  private static final Logger LOGGER = LoggerFactory.getLogger(LowLevelConsumerRoutingTableBuilderTest.class);

  /**
   * Randomized test: builds many external views where every segment is ONLINE and
   * verifies each generated routing table covers all segments exactly once.
   */
  @Test
  public void testAllOnlineRoutingTable() {
    final int ITERATIONS = 50;
    Random random = new Random();

    LowLevelConsumerRoutingTableBuilder routingTableBuilder = new LowLevelConsumerRoutingTableBuilder();
    routingTableBuilder.init(new BaseConfiguration(), new TableConfig(), null, null);

    long totalNanos = 0L;

    for (int i = 0; i < ITERATIONS; i++) {
      int instanceCount = random.nextInt(12) + 3; // 3 to 14 instances
      int partitionCount = random.nextInt(8) + 4; // 4 to 11 partitions
      int replicationFactor = random.nextInt(3) + 3; // 3 to 5 replicas

      // Generate instances
      String[] instanceNames = new String[instanceCount];
      for (int serverInstanceId = 0; serverInstanceId < instanceCount; serverInstanceId++) {
        instanceNames[serverInstanceId] = "Server_localhost_" + serverInstanceId;
      }

      // Generate partitions
      String[][] segmentNames = new String[partitionCount][];
      int totalSegmentCount = 0;
      for (int partitionId = 0; partitionId < partitionCount; partitionId++) {
        int segmentCount = random.nextInt(32); // 0 to 31 segments in partition
        segmentNames[partitionId] = new String[segmentCount];
        for (int sequenceNumber = 0; sequenceNumber < segmentCount; sequenceNumber++) {
          segmentNames[partitionId][sequenceNumber] =
              new LLCSegmentName("table", partitionId, sequenceNumber, System.currentTimeMillis()).getSegmentName();
        }
        totalSegmentCount += segmentCount;
      }

      // Generate instance configurations
      // FIX: diamond operator instead of explicit type argument, consistent with the other tests
      List<InstanceConfig> instanceConfigs = new ArrayList<>();
      for (String instanceName : instanceNames) {
        InstanceConfig instanceConfig = new InstanceConfig(instanceName);
        instanceConfigs.add(instanceConfig);
        instanceConfig.getRecord().setSimpleField(CommonConstants.Helix.IS_SHUTDOWN_IN_PROGRESS, "false");
      }

      // Generate a random external view
      ExternalView externalView = new ExternalView("table_REALTIME");
      int[] segmentCountForInstance = new int[instanceCount];
      int maxSegmentCountOnInstance = 0;
      for (int partitionId = 0; partitionId < segmentNames.length; partitionId++) {
        String[] segments = segmentNames[partitionId];

        // Assign each segment for this partition
        for (int replicaId = 0; replicaId < replicationFactor; ++replicaId) {
          for (int segmentIndex = 0; segmentIndex < segments.length; segmentIndex++) {
            int instanceIndex = -1;
            int randomOffset = random.nextInt(instanceCount);

            // Pick the first random instance that has fewer than maxSegmentCountOnInstance segments assigned to it
            for (int j = 0; j < instanceCount; j++) {
              int potentialInstanceIndex = (j + randomOffset) % instanceCount;
              if (segmentCountForInstance[potentialInstanceIndex] < maxSegmentCountOnInstance) {
                instanceIndex = potentialInstanceIndex;
                break;
              }
            }

            // All replicas have exactly maxSegmentCountOnInstance, pick a replica and increment the max
            if (instanceIndex == -1) {
              maxSegmentCountOnInstance++;
              instanceIndex = randomOffset;
            }

            // Increment the segment count for the instance
            segmentCountForInstance[instanceIndex]++;

            // Add the segment to the external view
            externalView.setState(segmentNames[partitionId][segmentIndex], instanceNames[instanceIndex], "ONLINE");
          }
        }
      }

      // Create routing tables
      long startTime = System.nanoTime();
      routingTableBuilder.computeOnExternalViewChange("table_REALTIME", externalView, instanceConfigs);

      List<Map<String, List<String>>> routingTables = routingTableBuilder.getRoutingTables();

      long endTime = System.nanoTime();
      totalNanos += endTime - startTime;

      // Check that all routing tables generated match all segments, with no duplicates
      for (Map<String, List<String>> routingTable : routingTables) {
        Set<String> assignedSegments = new HashSet<>();

        for (List<String> segmentsForServer : routingTable.values()) {
          for (String segment : segmentsForServer) {
            // FIX: Set.add returns false on duplicates, so one call replaces the
            // previous assertFalse(contains) + add pair (one hash lookup instead of two)
            assertTrue(assignedSegments.add(segment), "Duplicate segment assignment: " + segment);
          }
        }

        assertEquals(assignedSegments.size(), totalSegmentCount);
      }
    }

    // FIX: parameterized SLF4J logging instead of string concatenation
    LOGGER.warn("Routing table building avg ms: {}", totalNanos / (ITERATIONS * 1000000.0));
  }

  /**
   * Verifies that when several segments of a partition are CONSUMING, only the first
   * consuming segment is routed (the rest are not yet queryable).
   */
  @Test
  public void testMultipleConsumingSegments() {
    final int SEGMENT_COUNT = 10;
    final int ONLINE_SEGMENT_COUNT = 8;
    // FIX: removed unused local constant CONSUMING_SEGMENT_COUNT

    LowLevelConsumerRoutingTableBuilder routingTableBuilder = new LowLevelConsumerRoutingTableBuilder();
    routingTableBuilder.init(new BaseConfiguration(), new TableConfig(), null, null);

    List<SegmentName> segmentNames = new ArrayList<>();
    for(int i = 0; i < SEGMENT_COUNT; ++i) {
      segmentNames.add(new LLCSegmentName("table", 0, i, System.currentTimeMillis()));
    }

    List<InstanceConfig> instanceConfigs = new ArrayList<>();
    InstanceConfig instanceConfig = new InstanceConfig("Server_localhost_1234");
    instanceConfigs.add(instanceConfig);
    instanceConfig.getRecord().setSimpleField(CommonConstants.Helix.IS_SHUTDOWN_IN_PROGRESS, "false");

    // Generate an external view for a single server with some consuming segments
    ExternalView externalView = new ExternalView("table_REALTIME");
    for (int i = 0; i < ONLINE_SEGMENT_COUNT; i++) {
      externalView.setState(segmentNames.get(i).getSegmentName(), "Server_localhost_1234", "ONLINE");
    }
    for (int i = ONLINE_SEGMENT_COUNT; i < SEGMENT_COUNT; ++i) {
      externalView.setState(segmentNames.get(i).getSegmentName(), "Server_localhost_1234", "CONSUMING");
    }

    routingTableBuilder.computeOnExternalViewChange("table", externalView, instanceConfigs);
    List<Map<String, List<String>>> routingTables = routingTableBuilder.getRoutingTables();

    for (Map<String, List<String>> routingTable : routingTables) {
      for (List<String> segmentsForServer : routingTable.values()) {
        assertEquals(segmentsForServer.size(), ONLINE_SEGMENT_COUNT + 1);

        // Should only contain the first consuming segment, not the second
        assertTrue(segmentsForServer.contains(segmentNames.get(ONLINE_SEGMENT_COUNT).getSegmentName()),
            "Segment set does not contain the first segment in consuming state");
        for (int i = ONLINE_SEGMENT_COUNT + 1; i < SEGMENT_COUNT; i++) {
          assertFalse(segmentsForServer.contains(segmentNames.get(i).getSegmentName()),
              "Segment set contains a segment in consuming state that should not be there");
        }
      }
    }
  }

  /**
   * Verifies that a server flagged shutdown-in-progress is excluded, yielding empty routing tables.
   */
  @Test
  public void testShutdownInProgressServer() {
    final int SEGMENT_COUNT = 10;
    final int ONLINE_SEGMENT_COUNT = 8;

    LowLevelConsumerRoutingTableBuilder routingTableBuilder = new LowLevelConsumerRoutingTableBuilder();
    routingTableBuilder.init(new BaseConfiguration(), new TableConfig(), null, null);

    List<SegmentName> segmentNames = new ArrayList<>();
    for(int i = 0; i < SEGMENT_COUNT; ++i) {
      segmentNames.add(new LLCSegmentName("table", 0, i, System.currentTimeMillis()));
    }

    List<InstanceConfig> instanceConfigs = new ArrayList<>();
    InstanceConfig instanceConfig = new InstanceConfig("Server_localhost_1234");
    instanceConfigs.add(instanceConfig);
    instanceConfig.getRecord().setSimpleField(CommonConstants.Helix.IS_SHUTDOWN_IN_PROGRESS, "true");

    // Generate an external view for a single server with some consuming segments
    ExternalView externalView = new ExternalView("table_REALTIME");
    for (int i = 0; i < ONLINE_SEGMENT_COUNT; i++) {
      externalView.setState(segmentNames.get(i).getSegmentName(), "Server_localhost_1234", "ONLINE");
    }
    for (int i = ONLINE_SEGMENT_COUNT; i < SEGMENT_COUNT; ++i) {
      externalView.setState(segmentNames.get(i).getSegmentName(), "Server_localhost_1234", "CONSUMING");
    }

    routingTableBuilder.computeOnExternalViewChange("table", externalView, instanceConfigs);
    List<Map<String, List<String>>> routingTables = routingTableBuilder.getRoutingTables();

    for (Map<String, List<String>> routingTable : routingTables) {
      // FIX: use the statically imported assertTrue, consistent with the rest of the class
      assertTrue(routingTable.isEmpty());
    }
  }
}
/*
 * Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// NOTE(review): no package declaration — presumably a standalone migration tool run
// from the default package; confirm this is intentional.
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

/**
 * Initiate Database Connection to create EI_ANALYTICS tables.
 */
public class DatabaseConnection {

    /**
     * Variable which stores the database type.
     */
    private String dbType;
    /**
     * Variable which stores the database host.
     */
    private String host;
    /**
     * Variable which stores the database port.
     */
    private String port;
    /**
     * Variable which stores the database name.
     */
    private String dbName;
    /**
     * Variable which stores the database username.
     */
    private String user;
    /**
     * Variable which stores the database password.
     */
    private String pass;
    /**
     * Variable which stores the database driver location.
     */
    private String dbDriver;
    /**
     * Variable which stores the jdbcDriver.
     */
    private String jdbcDriver;
    /**
     * Variable which stores the database URL.
     */
    private String dbUrl;
    /**
     * Initialize Logger object to log messages
     */
    private static final Log LOG = LogFactory.getLog(DatabaseConnection.class);

    /**
     * Represent the set of databases
     */
    // NOTE(review): DBTYPE is not referenced in the visible portion of this class —
    // presumably used by a dispatch method further down; verify before removing.
    private enum DBTYPE {
        MYSQL, POSTGRESQL, ORACLE, MSSQL;
    }

    /**
     * Object of SQL connection.
     */
    // NOTE(review): package-private and mutable; presumably initialized by a
    // connect/load method outside this view — confirm lifecycle and closing.
    Connection connection = null;
    /**
     * Object of SQL statement.
     */
    Statement statement = null;

    /**
     * Constructor which initiate the variables dbType,host,port,user,pass,dbDriver.
     *
     * @param dbType   type of database.
     * @param host     host of database.
     * @param port     port of database.
     * @param dbName   name of database.
     * @param user     username of database.
     * @param pass     password of database.
     * @param dbDriver driver location of database.
     */
    public DatabaseConnection(String dbType, String host, String port, String dbName, String user, String pass,
                              String dbDriver) {
        this.dbType = dbType;
        this.host = host;
        this.port = port;
        this.dbName = dbName;
        this.user = user;
        this.pass = pass;
        this.dbDriver = dbDriver;
    }

    /**
     * Getter of jdbcDriver
     *
     * @return the jdbcDriver
     */
    public String getJdbcDriver() {
        return jdbcDriver;
    }

    /**
     * Setter of jdbcDriver
     *
     * @param jdbcDriver the jdbcDriver to set
     */
    public void setJdbcDriver(String jdbcDriver) {
        this.jdbcDriver = jdbcDriver;
    }

    /**
     * Getter of dbUrl
     *
     * @return the dbUrl
     */
    public String getDbUrl() {
        return dbUrl;
    }

    /**
     * Setter of dbUrl
     *
     * @param dbUrl the dbUrl to set
     */
    public void setDbUrl(String dbUrl) {
        this.dbUrl = dbUrl;
    }

    /**
     * Create EI_ANALYTICS tables in MySQL database.
*/ public void createMySQLTables() { String createComponentNameTable = "CREATE TABLE ComponentNameTable (componentId varchar(254) NOT NULL," + "componentName varchar(254) DEFAULT NULL,componentType varchar(254) DEFAULT NULL," + "PRIMARY KEY (componentId),KEY ComponentNameTable_INDEX (componentType));"; String createESBEventTable = "CREATE TABLE ESBEventTable ( metaTenantId int(11) DEFAULT NULL," + " messageFlowId varchar(254) DEFAULT NULL, host varchar(254) DEFAULT NULL," + " hashCode varchar(254) DEFAULT NULL, componentName varchar(254) DEFAULT NULL," + " componentType varchar(254) DEFAULT NULL, componentIndex int(11) DEFAULT NULL," + " componentId varchar(254) DEFAULT NULL, startTime bigint(20) DEFAULT NULL," + " endTime bigint(20) DEFAULT NULL, duration bigint(20) DEFAULT NULL," + " beforePayload varchar(5000) DEFAULT NULL, afterPayload varchar(5000) DEFAULT NULL," + " contextPropertyMap varchar(5000) DEFAULT NULL, transportPropertyMap varchar(5000) DEFAULT NULL," + " children varchar(254) DEFAULT NULL, entryPoint varchar(254) DEFAULT NULL," + " entryPointHashcode varchar(254) DEFAULT NULL, faultCount int(11) DEFAULT NULL," + " eventTimestamp bigint(20) DEFAULT NULL," + " KEY ESBEventTable_INDEX (metaTenantId,messageFlowId,host,hashCode,componentName,componentType," + "componentIndex,componentId,startTime,endTime,entryPoint,entryPointHashcode,faultCount));"; String createESBStatAgg_HOURS = "CREATE TABLE ESBStatAgg_HOURS ( AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + "AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,eventTimestamp bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount 
bigint(20) DEFAULT NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint));"; String createESBStatAgg_DAYS = "CREATE TABLE ESBStatAgg_DAYS ( AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + "AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,eventTimestamp bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount bigint(20) DEFAULT NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint));"; String createESBStatAgg_MONTHS = "CREATE TABLE ESBStatAgg_MONTHS ( AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + "AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,eventTimestamp bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount bigint(20) DEFAULT NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint));"; String createESBStatAgg_YEARS = "CREATE TABLE ESBStatAgg_YEARS ( AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + 
"AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,eventTimestamp bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount bigint(20) DEFAULT NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint));"; String createMediatorStatAgg_HOURS = "CREATE TABLE MediatorStatAgg_HOURS (AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL,componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + "entryPointHashcode varchar(254) NOT NULL,hashCode varchar(254) NOT NULL," + "AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,startTime bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount bigint(20) DEFAULT NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint,entryPointHashcode,hashCode));"; String createMediatorStatAgg_DAYS = "CREATE TABLE MediatorStatAgg_DAYS (AGG_TIMESTAMP bigint(20) NOT NULL," + "AGG_EVENT_TIMESTAMP bigint(20) NOT NULL,metaTenantId int(11) NOT NULL," + "componentId varchar(254) NOT NULL,componentName varchar(254) NOT NULL," + "componentType varchar(254) NOT NULL,entryPoint varchar(254) NOT NULL," + "entryPointHashcode varchar(254) NOT NULL,hashCode varchar(254) NOT NULL," + "AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL,startTime bigint(20) DEFAULT NULL," + "AGG_SUM_duration bigint(20) DEFAULT NULL,AGG_COUNT bigint(20) DEFAULT NULL," + "AGG_MIN_duration bigint(20) DEFAULT NULL,AGG_MAX_duration bigint(20) DEFAULT NULL," + "AGG_SUM_faultCount bigint(20) DEFAULT 
NULL,PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint,entryPointHashcode,hashCode));"; String createMediatorStatAgg_MONTHS = "CREATE TABLE MediatorStatAgg_MONTHS " + "(AGG_TIMESTAMP bigint(20) NOT NULL,AGG_EVENT_TIMESTAMP bigint(20) NOT NULL," + "metaTenantId int(11) NOT NULL,componentId varchar(254) NOT NULL," + "componentName varchar(254) NOT NULL,componentType varchar(254) NOT NULL," + "entryPoint varchar(254) NOT NULL,entryPointHashcode varchar(254) NOT NULL," + "hashCode varchar(254) NOT NULL,AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL," + "startTime bigint(20) DEFAULT NULL,AGG_SUM_duration bigint(20) DEFAULT NULL," + "AGG_COUNT bigint(20) DEFAULT NULL,AGG_MIN_duration bigint(20) DEFAULT NULL," + "AGG_MAX_duration bigint(20) DEFAULT NULL,AGG_SUM_faultCount bigint(20) DEFAULT NULL," + "PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode));"; String createMediatorStatAgg_YEARS = "CREATE TABLE MediatorStatAgg_YEARS " + "(AGG_TIMESTAMP bigint(20) NOT NULL,AGG_EVENT_TIMESTAMP bigint(20) NOT NULL," + "metaTenantId int(11) NOT NULL,componentId varchar(254) NOT NULL," + "componentName varchar(254) NOT NULL,componentType varchar(254) NOT NULL," + "entryPoint varchar(254) NOT NULL,entryPointHashcode varchar(254) NOT NULL," + "hashCode varchar(254) NOT NULL,AGG_LAST_EVENT_TIMESTAMP bigint(20) DEFAULT NULL," + "startTime bigint(20) DEFAULT NULL,AGG_SUM_duration bigint(20) DEFAULT NULL," + "AGG_COUNT bigint(20) DEFAULT NULL,AGG_MIN_duration bigint(20) DEFAULT NULL," + "AGG_MAX_duration bigint(20) DEFAULT NULL,AGG_SUM_faultCount bigint(20) DEFAULT NULL," + "PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode));"; List<String> al = new ArrayList<>(); al.add(createComponentNameTable); al.add(createESBEventTable); 
al.add(createESBStatAgg_HOURS); al.add(createESBStatAgg_DAYS); al.add(createESBStatAgg_MONTHS); al.add(createESBStatAgg_YEARS); al.add(createMediatorStatAgg_HOURS); al.add(createMediatorStatAgg_DAYS); al.add(createMediatorStatAgg_MONTHS); al.add(createMediatorStatAgg_YEARS); try { for (String s : al) { statement.executeUpdate(s); } LOG.info("EI_ANALYTICS tables created in MySQL"); } catch (SQLException e) { LOG.error(e); LOG.info("Drop all existing tables in the database & re-run the script"); } } /** * Create EI_ANALYTICS tables in Postgresql database. */ public void createPostgresqlTables() { String createComponentNameTable = "CREATE TABLE ComponentNameTable ( componentId varchar(254) NOT NULL," + " componentName varchar(254), componentType varchar(254), PRIMARY KEY (componentId) );"; String createComponentNameTableINDEX = "CREATE INDEX ComponentNameTable_INDEX" + " ON ComponentNameTable (componentType);"; String createESBEventTable = "CREATE TABLE ESBEventTable ( metaTenantId int, messageFlowId varchar(254)," + " host varchar(254), hashCode varchar(254), componentName varchar(254), componentType varchar(254)," + " componentIndex int, componentId varchar(254), startTime bigint, endTime bigint, duration bigint," + " beforePayload varchar(5000), afterPayload varchar(5000), contextPropertyMap varchar(5000)," + " transportPropertyMap varchar(5000), children varchar(254), entryPoint varchar(254)," + " entryPointHashcode varchar(254), faultCount int, eventTimestamp bigint );"; String createESBEventTableINDEX = "CREATE INDEX ESBEventTable_INDEX ON ESBEventTable ( metaTenantId," + " messageFlowId, host, hashCode, componentName, componentType, componentIndex, componentId," + " startTime, endTime, entryPoint, entryPointHashcode, faultCount );"; String createESBStatAgg_HOURS = "CREATE TABLE ESBStatAgg_HOURS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL," + " componentId varchar(254) NOT NULL, componentName varchar(254) 
NOT NULL," + " componentType varchar(254) NOT NULL, entryPoint varchar(254) NOT NULL," + " AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL, eventTimestamp bigint DEFAULT NULL," + " AGG_SUM_duration bigint DEFAULT NULL, AGG_COUNT bigint DEFAULT NULL," + " AGG_MIN_duration bigint DEFAULT NULL, AGG_MAX_duration bigint DEFAULT NULL," + " AGG_SUM_faultCount bigint DEFAULT NULL, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint) );"; String createESBStatAgg_DAYS = "CREATE TABLE ESBStatAgg_DAYS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL," + " componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + " componentType varchar(254) NOT NULL, entryPoint varchar(254) NOT NULL," + " AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL, eventTimestamp bigint DEFAULT NULL," + " AGG_SUM_duration bigint DEFAULT NULL, AGG_COUNT bigint DEFAULT NULL," + " AGG_MIN_duration bigint DEFAULT NULL, AGG_MAX_duration bigint DEFAULT NULL," + " AGG_SUM_faultCount bigint DEFAULT NULL, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint) );"; String createESBStatAgg_MONTHS = "CREATE TABLE ESBStatAgg_MONTHS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL," + " componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + " componentType varchar(254) NOT NULL, entryPoint varchar(254) NOT NULL," + " AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL, eventTimestamp bigint DEFAULT NULL," + " AGG_SUM_duration bigint DEFAULT NULL, AGG_COUNT bigint DEFAULT NULL," + " AGG_MIN_duration bigint DEFAULT NULL, AGG_MAX_duration bigint DEFAULT NULL," + " AGG_SUM_faultCount bigint DEFAULT NULL, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint) );"; String createESBStatAgg_YEARS = "CREATE TABLE 
ESBStatAgg_YEARS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL," + " componentId varchar(254) NOT NULL, componentName varchar(254) NOT NULL," + " componentType varchar(254) NOT NULL, entryPoint varchar(254) NOT NULL," + " AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL, eventTimestamp bigint DEFAULT NULL," + " AGG_SUM_duration bigint DEFAULT NULL, AGG_COUNT bigint DEFAULT NULL," + " AGG_MIN_duration bigint DEFAULT NULL, AGG_MAX_duration bigint DEFAULT NULL," + " AGG_SUM_faultCount bigint DEFAULT NULL, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP," + "metaTenantId,componentId,componentType,componentName,entryPoint) );"; String createMediatorStatAgg_HOURS = "CREATE TABLE MediatorStatAgg_HOURS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint," + " AGG_SUM_duration bigint, AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint," + " AGG_SUM_faultCount bigint, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId," + "componentId,componentType,componentName,entryPoint,entryPointHashcode,hashCode) );"; String createMediatorStatAgg_DAYS = "CREATE TABLE MediatorStatAgg_DAYS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint, AGG_SUM_duration bigint," + " AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; String 
createMediatorStatAgg_MONTHS = "CREATE TABLE MediatorStatAgg_MONTHS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254)," + " componentName varchar(254), componentType varchar(254), entryPoint varchar(254)," + " entryPointHashcode varchar(254), hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint," + " startTime bigint, AGG_SUM_duration bigint, AGG_COUNT bigint, AGG_MIN_duration bigint," + " AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; String createMediatorStatAgg_YEARS = "CREATE TABLE MediatorStatAgg_YEARS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint," + " AGG_SUM_duration bigint, AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint," + " AGG_SUM_faultCount bigint, PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId," + "componentId,componentType,componentName,entryPoint,entryPointHashcode,hashCode) );"; List<String> al = new ArrayList<>(); al.add(createComponentNameTable); al.add(createComponentNameTableINDEX); al.add(createESBEventTable); al.add(createESBEventTableINDEX); al.add(createESBStatAgg_HOURS); al.add(createESBStatAgg_DAYS); al.add(createESBStatAgg_MONTHS); al.add(createESBStatAgg_YEARS); al.add(createMediatorStatAgg_HOURS); al.add(createMediatorStatAgg_DAYS); al.add(createMediatorStatAgg_MONTHS); al.add(createMediatorStatAgg_YEARS); try { for (String s : al) { statement.executeUpdate(s); } LOG.info("EI_ANALYTICS tables created in Postgresql"); } catch (SQLException e) { LOG.error(e); LOG.info("Drop all existing tables in the database & re-run the script"); } } /** * Create 
EI_ANALYTICS tables in Oracle database. */ public void createOracleTables() { String createComponentNameTable = "create table COMPONENTNAMETABLE ( COMPONENTID VARCHAR2(254) PRIMARY KEY," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254) )"; String createComponentNameTableINDEX = "create index COMPONENTNAMETABLE_INDEX" + " on COMPONENTNAMETABLE (COMPONENTTYPE)"; String createESBEventTable = "create table ESBEVENTTABLE ( METATENANTID NUMBER(10)," + " MESSAGEFLOWID VARCHAR2(254), HOST VARCHAR2(254), HASHCODE VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), COMPONENTINDEX NUMBER(10)," + " COMPONENTID VARCHAR2(254), STARTTIME NUMBER(19), ENDTIME NUMBER(19), DURATION NUMBER(19)," + " BEFOREPAYLOAD CLOB, AFTERPAYLOAD CLOB, CONTEXTPROPERTYMAP CLOB, TRANSPORTPROPERTYMAP CLOB," + " CHILDREN VARCHAR2(254), ENTRYPOINT VARCHAR2(254), ENTRYPOINTHASHCODE VARCHAR2(254)," + " FAULTCOUNT NUMBER(10), EVENTTIMESTAMP NUMBER(19))"; String createESBEventTableINDEX = "create index ESBEVENTTABLE_INDEX on ESBEVENTTABLE (METATENANTID," + "MESSAGEFLOWID,HOST,HASHCODE,COMPONENTNAME,COMPONENTTYPE,COMPONENTINDEX,COMPONENTID,STARTTIME," + "ENDTIME,ENTRYPOINT,ENTRYPOINTHASHCODE,FAULTCOUNT)"; String createESBStatAgg_HOURS = "create table ESBSTATAGG_HOURS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " AGG_LAST_EVENT_TIMESTAMP NUMBER(19), EVENTTIMESTAMP NUMBER(19), AGG_SUM_DURATION NUMBER(19)," + " AGG_COUNT NUMBER(19), AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP," + " METATENANTID, COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT))"; String createESBStatAgg_DAYS = "create table ESBSTATAGG_DAYS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID 
VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " AGG_LAST_EVENT_TIMESTAMP NUMBER(19), EVENTTIMESTAMP NUMBER(19), AGG_SUM_DURATION NUMBER(19)," + " AGG_COUNT NUMBER(19), AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT))"; String createESBStatAgg_MONTHS = "create table ESBSTATAGG_MONTHS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " AGG_LAST_EVENT_TIMESTAMP NUMBER(19), EVENTTIMESTAMP NUMBER(19), AGG_SUM_DURATION NUMBER(19)," + " AGG_COUNT NUMBER(19), AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT))"; String createESBStatAgg_YEARS = "create table ESBSTATAGG_YEARS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " AGG_LAST_EVENT_TIMESTAMP NUMBER(19), EVENTTIMESTAMP NUMBER(19), AGG_SUM_DURATION NUMBER(19)," + " AGG_COUNT NUMBER(19), AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT))"; String createMediatorStatAgg_HOURS = "create table MEDIATORSTATAGG_HOURS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " ENTRYPOINTHASHCODE VARCHAR2(254), 
HASHCODE VARCHAR2(254), AGG_LAST_EVENT_TIMESTAMP NUMBER(19)," + " STARTTIME NUMBER(19), AGG_SUM_DURATION NUMBER(19), AGG_COUNT NUMBER(19)," + " AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT, ENTRYPOINTHASHCODE, HASHCODE))"; String createMediatorStatAgg_DAYS = "create table MEDIATORSTATAGG_DAYS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " ENTRYPOINTHASHCODE VARCHAR2(254), HASHCODE VARCHAR2(254), AGG_LAST_EVENT_TIMESTAMP NUMBER(19)," + " STARTTIME NUMBER(19), AGG_SUM_DURATION NUMBER(19), AGG_COUNT NUMBER(19)," + " AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT, ENTRYPOINTHASHCODE, HASHCODE))"; String createMediatorStatAgg_MONTHS = "create table MEDIATORSTATAGG_MONTHS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " ENTRYPOINTHASHCODE VARCHAR2(254), HASHCODE VARCHAR2(254), AGG_LAST_EVENT_TIMESTAMP NUMBER(19)," + " STARTTIME NUMBER(19), AGG_SUM_DURATION NUMBER(19), AGG_COUNT NUMBER(19)," + " AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT, ENTRYPOINTHASHCODE, HASHCODE))"; String createMediatorStatAgg_YEARS = "create table MEDIATORSTATAGG_YEARS ( AGG_TIMESTAMP NUMBER(19)," + " AGG_EVENT_TIMESTAMP NUMBER(19), METATENANTID NUMBER(10), COMPONENTID 
VARCHAR2(254)," + " COMPONENTNAME VARCHAR2(254), COMPONENTTYPE VARCHAR2(254), ENTRYPOINT VARCHAR2(254)," + " ENTRYPOINTHASHCODE VARCHAR2(254), HASHCODE VARCHAR2(254), AGG_LAST_EVENT_TIMESTAMP NUMBER(19)," + " STARTTIME NUMBER(19), AGG_SUM_DURATION NUMBER(19), AGG_COUNT NUMBER(19)," + " AGG_MIN_DURATION NUMBER(19), AGG_MAX_DURATION NUMBER(19)," + " AGG_SUM_FAULTCOUNT NUMBER(19), PRIMARY KEY ( AGG_TIMESTAMP, AGG_EVENT_TIMESTAMP, METATENANTID," + " COMPONENTID, COMPONENTNAME, COMPONENTTYPE, ENTRYPOINT, ENTRYPOINTHASHCODE, HASHCODE))"; List<String> al = new ArrayList<>(); al.add(createComponentNameTable); al.add(createComponentNameTableINDEX); al.add(createESBEventTable); al.add(createESBEventTableINDEX); al.add(createESBStatAgg_HOURS); al.add(createESBStatAgg_DAYS); al.add(createESBStatAgg_MONTHS); al.add(createESBStatAgg_YEARS); al.add(createMediatorStatAgg_HOURS); al.add(createMediatorStatAgg_DAYS); al.add(createMediatorStatAgg_MONTHS); al.add(createMediatorStatAgg_YEARS); try { for (String s : al) { statement.executeUpdate(s); } LOG.info("EI_ANALYTICS tables created in Oracle"); } catch (SQLException e) { LOG.error(e); LOG.info("Drop all existing tables in the database & re-run the script"); } } /** * Create EI_ANALYTICS tables in Mssql database. 
*/ public void createMssqlTables() { String createComponentNameTable = "CREATE TABLE ComponentNameTable ( componentId varchar(254) NOT NULL," + " componentName varchar(254), componentType varchar(254), PRIMARY KEY (componentId) );"; String createComponentNameTableINDEX = "CREATE INDEX ComponentNameTable_INDEX" + " ON ComponentNameTable (componentType);"; String createESBEventTable = "CREATE TABLE ESBEventTable ( metaTenantId int, messageFlowId varchar(254)," + " host varchar(254), hashCode varchar(254), componentName varchar(254), componentType varchar(254)," + " componentIndex int, componentId varchar(254), startTime bigint, endTime bigint, duration bigint," + " beforePayload varchar(5000), afterPayload varchar(5000), contextPropertyMap varchar(5000)," + " transportPropertyMap varchar(5000), children varchar(254), entryPoint varchar(254)," + " entryPointHashcode varchar(254), faultCount int, eventTimestamp bigint );"; String createESBEventTableINDEX = "CREATE INDEX ESBEventTable_INDEX ON ESBEventTable ( metaTenantId," + " messageFlowId, host, hashCode, componentName, componentType, componentIndex, componentId," + " startTime, endTime, entryPoint, entryPointHashcode, faultCount );"; String createESBStatAgg_HOURS = "CREATE TABLE ESBStatAgg_HOURS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL, componentId varchar(254) NOT NULL," + " componentName varchar(254) NOT NULL, componentType varchar(254) NOT NULL," + " entryPoint varchar(254) NOT NULL, AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL," + " eventTimestamp bigint DEFAULT NULL, AGG_SUM_duration bigint DEFAULT NULL," + " AGG_COUNT bigint DEFAULT NULL, AGG_MIN_duration bigint DEFAULT NULL," + " AGG_MAX_duration bigint DEFAULT NULL, AGG_SUM_faultCount bigint DEFAULT NULL," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint) );"; String createESBStatAgg_DAYS = "CREATE TABLE ESBStatAgg_DAYS ( 
AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL, componentId varchar(254) NOT NULL," + " componentName varchar(254) NOT NULL, componentType varchar(254) NOT NULL," + " entryPoint varchar(254) NOT NULL, AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL," + " eventTimestamp bigint DEFAULT NULL, AGG_SUM_duration bigint DEFAULT NULL," + " AGG_COUNT bigint DEFAULT NULL, AGG_MIN_duration bigint DEFAULT NULL," + " AGG_MAX_duration bigint DEFAULT NULL, AGG_SUM_faultCount bigint DEFAULT NULL," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint) );"; String createESBStatAgg_MONTHS = "CREATE TABLE ESBStatAgg_MONTHS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL, componentId varchar(254) NOT NULL," + " componentName varchar(254) NOT NULL, componentType varchar(254) NOT NULL," + " entryPoint varchar(254) NOT NULL, AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL," + " eventTimestamp bigint DEFAULT NULL, AGG_SUM_duration bigint DEFAULT NULL," + " AGG_COUNT bigint DEFAULT NULL, AGG_MIN_duration bigint DEFAULT NULL," + " AGG_MAX_duration bigint DEFAULT NULL, AGG_SUM_faultCount bigint DEFAULT NULL," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint) );"; String createESBStatAgg_YEARS = "CREATE TABLE ESBStatAgg_YEARS ( AGG_TIMESTAMP bigint NOT NULL," + " AGG_EVENT_TIMESTAMP bigint NOT NULL, metaTenantId int NOT NULL, componentId varchar(254) NOT NULL," + " componentName varchar(254) NOT NULL, componentType varchar(254) NOT NULL," + " entryPoint varchar(254) NOT NULL, AGG_LAST_EVENT_TIMESTAMP bigint DEFAULT NULL," + " eventTimestamp bigint DEFAULT NULL, AGG_SUM_duration bigint DEFAULT NULL," + " AGG_COUNT bigint DEFAULT NULL, AGG_MIN_duration bigint DEFAULT NULL," + " AGG_MAX_duration bigint DEFAULT NULL, AGG_SUM_faultCount bigint DEFAULT NULL," + " 
PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint) );"; String createMediatorStatAgg_HOURS = "CREATE TABLE MediatorStatAgg_HOURS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254)," + " componentName varchar(254), componentType varchar(254), entryPoint varchar(254)," + " entryPointHashcode varchar(254), hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint," + " startTime bigint, AGG_SUM_duration bigint, AGG_COUNT bigint, AGG_MIN_duration bigint," + " AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; String createMediatorStatAgg_DAYS = "CREATE TABLE MediatorStatAgg_DAYS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint, AGG_SUM_duration bigint," + " AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; String createMediatorStatAgg_MONTHS = "CREATE TABLE MediatorStatAgg_MONTHS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint, AGG_SUM_duration bigint," + " AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY 
(AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; String createMediatorStatAgg_YEARS = "CREATE TABLE MediatorStatAgg_YEARS ( AGG_TIMESTAMP bigint," + " AGG_EVENT_TIMESTAMP bigint, metaTenantId int, componentId varchar(254), componentName varchar(254)," + " componentType varchar(254), entryPoint varchar(254), entryPointHashcode varchar(254)," + " hashCode varchar(254), AGG_LAST_EVENT_TIMESTAMP bigint, startTime bigint, AGG_SUM_duration bigint," + " AGG_COUNT bigint, AGG_MIN_duration bigint, AGG_MAX_duration bigint, AGG_SUM_faultCount bigint," + " PRIMARY KEY (AGG_TIMESTAMP,AGG_EVENT_TIMESTAMP,metaTenantId,componentId,componentType," + "componentName,entryPoint,entryPointHashcode,hashCode) );"; List<String> al = new ArrayList<>(); al.add(createComponentNameTable); al.add(createComponentNameTableINDEX); al.add(createESBEventTable); al.add(createESBEventTableINDEX); al.add(createESBStatAgg_HOURS); al.add(createESBStatAgg_DAYS); al.add(createESBStatAgg_MONTHS); al.add(createESBStatAgg_YEARS); al.add(createMediatorStatAgg_HOURS); al.add(createMediatorStatAgg_DAYS); al.add(createMediatorStatAgg_MONTHS); al.add(createMediatorStatAgg_YEARS); try { for (String s : al) { statement.executeUpdate(s); } LOG.info("EI_ANALYTICS tables created in Mssql"); } catch (SQLException e) { LOG.error(e); LOG.info("Drop all existing tables in the database & re-run the script"); } } /** * Load JDBC driver. Create database connection. Create EI_ANALYTICS tables. 
*/
public void connect() {
    LOG.info("Starting migration process...");

    // Resolve the database type exactly once; the same value drives both the
    // driver/URL configuration and the table-creation dispatch below.
    final DBTYPE type;
    try {
        type = DBTYPE.valueOf(dbType);
    } catch (IllegalArgumentException e) {
        // Previously an unknown dbType either threw out of valueOf uncaught, or
        // (via the default branch) continued with a null driver and failed later
        // with a NullPointerException. Abort cleanly up front instead.
        LOG.info("Invalid Database Type");
        return;
    }

    switch (type) {
        case MYSQL:
            setJdbcDriver("com.mysql.jdbc.Driver");
            setDbUrl("jdbc:mysql://" + host + ":" + port + "/" + dbName + "?allowMultiQueries=true");
            break;
        case POSTGRESQL:
            setJdbcDriver("org.postgresql.Driver");
            setDbUrl("jdbc:postgresql://" + host + ":" + port + "/" + dbName + "?allowMultiQueries=true");
            break;
        case ORACLE:
            setJdbcDriver("oracle.jdbc.driver.OracleDriver");
            setDbUrl("jdbc:oracle:thin:@" + host + ":" + port + ":" + dbName);
            break;
        case MSSQL:
            setJdbcDriver("com.microsoft.sqlserver.jdbc.SQLServerDriver");
            setDbUrl("jdbc:sqlserver://" + host + ":" + port + ";databaseName=" + dbName);
            break;
        default:
            // Defensive: unreachable for the current enum, but do not proceed
            // with an unconfigured driver if DBTYPE ever grows a new constant.
            LOG.info("Invalid Database Type");
            return;
    }
    LOG.info("Set JDBC driver & Database URL");

    try {
        // Load the driver jar from the user-supplied path. The driver is wrapped
        // in DriverWrapper so that DriverManager accepts a class that was loaded
        // by a class loader other than the system one.
        URL url = new File(dbDriver).toURI().toURL();
        URLClassLoader ucl = new URLClassLoader(new URL[]{url});
        LOG.info("Attempting to load driver...");
        Driver driver = (Driver) Class.forName(jdbcDriver, true, ucl).newInstance();
        DriverManager.registerDriver(new DriverWrapper(driver));
        LOG.info("Driver Loaded");

        LOG.info("Attempting to establish connection to the selected database...");
        connection = DriverManager.getConnection(dbUrl, user, pass);
        LOG.info("Connection established");

        LOG.info("Attempting to create tables in the given database...");
        statement = connection.createStatement();
        switch (type) {
            case MYSQL:
                createMySQLTables();
                break;
            case POSTGRESQL:
                createPostgresqlTables();
                break;
            case ORACLE:
                createOracleTables();
                break;
            case MSSQL:
                createMssqlTables();
                break;
            default:
                LOG.info("Invalid Database Type");
        }
    } catch (InstantiationException e) {
        LOG.error(e);
    } catch (IllegalAccessException e) {
        LOG.error(e);
    } catch (MalformedURLException e) {
        LOG.error(String.format("Error occurred while opening url, %s", e));
    } catch (ClassNotFoundException e) {
        // Without a driver nothing else can work; keep the original hard exit.
        LOG.error(String.format("Error occurred while loading driver class, %s", e));
        System.exit(1);
    } catch (SQLException e) {
        LOG.error(String.format("Error occurred while making connection to the database, %s", e));
    } finally {
        // Close the statement before the connection; each close failure is
        // logged independently so one failure does not leak the other resource.
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException e) {
                LOG.error(e);
            }
        }
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                LOG.error(e);
            }
        }
    }
}

/**
 * Main class of the programme.
 *
 * @param args seven positional command line arguments forwarded verbatim to the
 *             {@code DatabaseConnection} constructor — presumably host, port,
 *             database name, credentials, driver path and database type;
 *             TODO(review): confirm the order against the constructor.
 */
public static void main(String[] args) {
    DatabaseConnection connection = new DatabaseConnection(args[0], args[1], args[2], args[3],
            args[4], args[5], args[6]);
    connection.connect();
}
}
/* * Copyright (C) 2009 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.cache; import static com.google.common.base.Objects.firstNonNull; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import com.google.common.annotations.Beta; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.base.Ascii; import com.google.common.base.Equivalence; import com.google.common.base.Objects; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.base.Ticker; import com.google.common.cache.AbstractCache.SimpleStatsCounter; import com.google.common.cache.AbstractCache.StatsCounter; import com.google.common.cache.LocalCache.Strength; import java.lang.ref.SoftReference; import java.lang.ref.WeakReference; import java.util.ConcurrentModificationException; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.CheckReturnValue; /** * <p>A builder of {@link LoadingCache} and {@link Cache} instances having any combination of the * following features: * * <ul> * <li>automatic loading of entries into the cache * <li>least-recently-used eviction when a maximum size is exceeded 
* <li>time-based expiration of entries, measured since last access or last write * <li>keys automatically wrapped in {@linkplain WeakReference weak} references * <li>values automatically wrapped in {@linkplain WeakReference weak} or * {@linkplain SoftReference soft} references * <li>notification of evicted (or otherwise removed) entries * <li>accumulation of cache access statistics * </ul> * * These features are all optional; caches can be created using all or none of them. By default * cache instances created by {@code CacheBuilder} will not perform any type of eviction. * * <p>Usage example: <pre> {@code * * LoadingCache<Key, Graph> graphs = CacheBuilder.newBuilder() * .maximumSize(10000) * .expireAfterWrite(10, TimeUnit.MINUTES) * .removalListener(MY_LISTENER) * .build( * new CacheLoader<Key, Graph>() { * public Graph load(Key key) throws AnyException { * return createExpensiveGraph(key); * } * });}</pre> * * Or equivalently, <pre> {@code * * // In real life this would come from a command-line flag or config file * String spec = "maximumSize=10000,expireAfterWrite=10m"; * * LoadingCache<Key, Graph> graphs = CacheBuilder.from(spec) * .removalListener(MY_LISTENER) * .build( * new CacheLoader<Key, Graph>() { * public Graph load(Key key) throws AnyException { * return createExpensiveGraph(key); * } * });}</pre> * * <p>The returned cache is implemented as a hash table with similar performance characteristics to * {@link ConcurrentHashMap}. It implements all optional operations of the {@link LoadingCache} and * {@link Cache} interfaces. The {@code asMap} view (and its collection views) have <i>weakly * consistent iterators</i>. This means that they are safe for concurrent use, but if other threads * modify the cache after the iterator is created, it is undefined which of these changes, if any, * are reflected in that iterator. These iterators never throw {@link * ConcurrentModificationException}. 
* * <p><b>Note:</b> by default, the returned cache uses equality comparisons (the * {@link Object#equals equals} method) to determine equality for keys or values. However, if * {@link #weakKeys} was specified, the cache uses identity ({@code ==}) * comparisons instead for keys. Likewise, if {@link #weakValues} or {@link #softValues} was * specified, the cache uses identity comparisons for values. * * <p>Entries are automatically evicted from the cache when any of * {@linkplain #maximumSize(long) maximumSize}, {@linkplain #maximumWeight(long) maximumWeight}, * {@linkplain #expireAfterWrite expireAfterWrite}, * {@linkplain #expireAfterAccess expireAfterAccess}, {@linkplain #weakKeys weakKeys}, * {@linkplain #weakValues weakValues}, or {@linkplain #softValues softValues} are requested. * * <p>If {@linkplain #maximumSize(long) maximumSize} or * {@linkplain #maximumWeight(long) maximumWeight} is requested entries may be evicted on each cache * modification. * * <p>If {@linkplain #expireAfterWrite expireAfterWrite} or * {@linkplain #expireAfterAccess expireAfterAccess} is requested entries may be evicted on each * cache modification, on occasional cache accesses, or on calls to {@link Cache#cleanUp}. Expired * entries may be counted in {@link Cache#size}, but will never be visible to read or write * operations. * * <p>If {@linkplain #weakKeys weakKeys}, {@linkplain #weakValues weakValues}, or * {@linkplain #softValues softValues} are requested, it is possible for a key or value present in * the cache to be reclaimed by the garbage collector. Entries with reclaimed keys or values may be * removed from the cache on each cache modification, on occasional cache accesses, or on calls to * {@link Cache#cleanUp}; such entries may be counted in {@link Cache#size}, but will never be * visible to read or write operations. 
* * <p>Certain cache configurations will result in the accrual of periodic maintenance tasks which * will be performed during write operations, or during occasional read operations in the absense of * writes. The {@link Cache#cleanUp} method of the returned cache will also perform maintenance, but * calling it should not be necessary with a high throughput cache. Only caches built with * {@linkplain #removalListener removalListener}, {@linkplain #expireAfterWrite expireAfterWrite}, * {@linkplain #expireAfterAccess expireAfterAccess}, {@linkplain #weakKeys weakKeys}, * {@linkplain #weakValues weakValues}, or {@linkplain #softValues softValues} perform periodic * maintenance. * * <p>The caches produced by {@code CacheBuilder} are serializable, and the deserialized caches * retain all the configuration properties of the original cache. Note that the serialized form does * <i>not</i> include cache contents, but only configuration. * * <p>See the Guava User Guide article on <a href= * "http://code.google.com/p/guava-libraries/wiki/CachesExplained">caching</a> for a higher-level * explanation. * * @param <K> the base key type for all caches created by this builder * @param <V> the base value type for all caches created by this builder * @author Charles Fry * @author Kevin Bourrillion * @since 10.0 */ @GwtCompatible(emulated = true) public final class CacheBuilder<K, V> { private static final int DEFAULT_INITIAL_CAPACITY = 16; private static final int DEFAULT_CONCURRENCY_LEVEL = 4; private static final int DEFAULT_EXPIRATION_NANOS = 0; private static final int DEFAULT_REFRESH_NANOS = 0; static final Supplier<? 
extends StatsCounter> NULL_STATS_COUNTER = Suppliers.ofInstance( new StatsCounter() { @Override public void recordHits(int count) {} @Override public void recordMisses(int count) {} @Override public void recordLoadSuccess(long loadTime) {} @Override public void recordLoadException(long loadTime) {} @Override public void recordEviction() {} @Override public CacheStats snapshot() { return EMPTY_STATS; } }); static final CacheStats EMPTY_STATS = new CacheStats(0, 0, 0, 0, 0, 0); static final Supplier<StatsCounter> CACHE_STATS_COUNTER = new Supplier<StatsCounter>() { @Override public StatsCounter get() { return new SimpleStatsCounter(); } }; enum NullListener implements RemovalListener<Object, Object> { INSTANCE; @Override public void onRemoval(RemovalNotification<Object, Object> notification) {} } enum OneWeigher implements Weigher<Object, Object> { INSTANCE; @Override public int weigh(Object key, Object value) { return 1; } } static final Ticker NULL_TICKER = new Ticker() { @Override public long read() { return 0; } }; private static final Logger logger = Logger.getLogger(CacheBuilder.class.getName()); static final int UNSET_INT = -1; boolean strictParsing = true; int initialCapacity = UNSET_INT; int concurrencyLevel = UNSET_INT; long maximumSize = UNSET_INT; long maximumWeight = UNSET_INT; Weigher<? super K, ? super V> weigher; Strength keyStrength; Strength valueStrength; long expireAfterWriteNanos = UNSET_INT; long expireAfterAccessNanos = UNSET_INT; long refreshNanos = UNSET_INT; Equivalence<Object> keyEquivalence; Equivalence<Object> valueEquivalence; RemovalListener<? super K, ? super V> removalListener; Ticker ticker; Supplier<? extends StatsCounter> statsCounterSupplier = NULL_STATS_COUNTER; // TODO(fry): make constructor private and update tests to use newBuilder CacheBuilder() {} /** * Constructs a new {@code CacheBuilder} instance with default settings, including strong keys, * strong values, and no automatic eviction of any kind. 
*/
public static CacheBuilder<Object, Object> newBuilder() {
  return new CacheBuilder<Object, Object>();
}

/**
 * Constructs a new {@code CacheBuilder} instance with the settings specified in {@code spec}.
 *
 * @since 12.0
 */
@Beta
@GwtIncompatible("To be supported")
public static CacheBuilder<Object, Object> from(CacheBuilderSpec spec) {
  return spec.toCacheBuilder()
      .lenientParsing();
}

/**
 * Constructs a new {@code CacheBuilder} instance with the settings specified in {@code spec}.
 * This is especially useful for command-line configuration of a {@code CacheBuilder}.
 *
 * @param spec a String in the format specified by {@link CacheBuilderSpec}
 * @since 12.0
 */
@Beta
@GwtIncompatible("To be supported")
public static CacheBuilder<Object, Object> from(String spec) {
  return from(CacheBuilderSpec.parse(spec));
}

/**
 * Enables lenient parsing. Useful for tests and spec parsing.
 */
@GwtIncompatible("To be supported")
CacheBuilder<K, V> lenientParsing() {
  strictParsing = false;
  return this;
}

/**
 * Sets a custom {@code Equivalence} strategy for comparing keys.
 *
 * <p>By default, the cache uses {@link Equivalence#identity} to determine key equality when
 * {@link #weakKeys} is specified, and {@link Equivalence#equals()} otherwise.
 */
@GwtIncompatible("To be supported")
CacheBuilder<K, V> keyEquivalence(Equivalence<Object> equivalence) {
  checkState(keyEquivalence == null, "key equivalence was already set to %s", keyEquivalence);
  keyEquivalence = checkNotNull(equivalence);
  return this;
}

Equivalence<Object> getKeyEquivalence() {
  return firstNonNull(keyEquivalence, getKeyStrength().defaultEquivalence());
}

/**
 * Sets a custom {@code Equivalence} strategy for comparing values.
 *
 * <p>By default, the cache uses {@link Equivalence#identity} to determine value equality when
 * {@link #weakValues} or {@link #softValues} is specified, and {@link Equivalence#equals()}
 * otherwise.
 */
@GwtIncompatible("To be supported")
CacheBuilder<K, V> valueEquivalence(Equivalence<Object> equivalence) {
  checkState(valueEquivalence == null,
      "value equivalence was already set to %s", valueEquivalence);
  this.valueEquivalence = checkNotNull(equivalence);
  return this;
}

Equivalence<Object> getValueEquivalence() {
  return firstNonNull(valueEquivalence, getValueStrength().defaultEquivalence());
}

/**
 * Sets the minimum total size for the internal hash tables. For example, if the initial capacity
 * is {@code 60}, and the concurrency level is {@code 8}, then eight segments are created, each
 * having a hash table of size eight. Providing a large enough estimate at construction time
 * avoids the need for expensive resizing operations later, but setting this value unnecessarily
 * high wastes memory.
 *
 * @throws IllegalArgumentException if {@code initialCapacity} is negative
 * @throws IllegalStateException if an initial capacity was already set
 */
public CacheBuilder<K, V> initialCapacity(int initialCapacity) {
  checkState(this.initialCapacity == UNSET_INT, "initial capacity was already set to %s",
      this.initialCapacity);
  checkArgument(initialCapacity >= 0);
  this.initialCapacity = initialCapacity;
  return this;
}

int getInitialCapacity() {
  return (initialCapacity == UNSET_INT) ? DEFAULT_INITIAL_CAPACITY : initialCapacity;
}

/**
 * Guides the allowed concurrency among update operations. Used as a hint for internal sizing. The
 * table is internally partitioned to try to permit the indicated number of concurrent updates
 * without contention. Because assignment of entries to these partitions is not necessarily
 * uniform, the actual concurrency observed may vary. Ideally, you should choose a value to
 * accommodate as many threads as will ever concurrently modify the table. Using a significantly
 * higher value than you need can waste space and time, and a significantly lower value can lead
 * to thread contention. But overestimates and underestimates within an order of magnitude do not
 * usually have much noticeable impact. A value of one permits only one thread to modify the cache
 * at a time, but since read operations and cache loading computations can proceed concurrently,
 * this still yields higher concurrency than full synchronization.
 *
 * <p> Defaults to 4. <b>Note:</b>The default may change in the future. If you care about this
 * value, you should always choose it explicitly.
 *
 * <p>The current implementation uses the concurrency level to create a fixed number of hashtable
 * segments, each governed by its own write lock. The segment lock is taken once for each explicit
 * write, and twice for each cache loading computation (once prior to loading the new value,
 * and once after loading completes). Much internal cache management is performed at the segment
 * granularity. For example, access queues and write queues are kept per segment when they are
 * required by the selected eviction algorithm. As such, when writing unit tests it is not
 * uncommon to specify {@code concurrencyLevel(1)} in order to achieve more deterministic eviction
 * behavior.
 *
 * <p>Note that future implementations may abandon segment locking in favor of more advanced
 * concurrency controls.
 *
 * @throws IllegalArgumentException if {@code concurrencyLevel} is nonpositive
 * @throws IllegalStateException if a concurrency level was already set
 */
public CacheBuilder<K, V> concurrencyLevel(int concurrencyLevel) {
  checkState(this.concurrencyLevel == UNSET_INT, "concurrency level was already set to %s",
      this.concurrencyLevel);
  checkArgument(concurrencyLevel > 0);
  this.concurrencyLevel = concurrencyLevel;
  return this;
}

int getConcurrencyLevel() {
  return (concurrencyLevel == UNSET_INT) ? DEFAULT_CONCURRENCY_LEVEL : concurrencyLevel;
}

/**
 * Specifies the maximum number of entries the cache may contain. Note that the cache <b>may evict
 * an entry before this limit is exceeded</b>. As the cache size grows close to the maximum, the
 * cache evicts entries that are less likely to be used again. For example, the cache may evict an
 * entry because it hasn't been used recently or very often.
 *
 * <p>When {@code size} is zero, elements will be evicted immediately after being loaded into the
 * cache. This can be useful in testing, or to disable caching temporarily without a code change.
 *
 * <p>This feature cannot be used in conjunction with {@link #maximumWeight}.
 *
 * @param size the maximum size of the cache
 * @throws IllegalArgumentException if {@code size} is negative
 * @throws IllegalStateException if a maximum size or weight was already set
 */
public CacheBuilder<K, V> maximumSize(long size) {
  checkState(this.maximumSize == UNSET_INT, "maximum size was already set to %s",
      this.maximumSize);
  checkState(this.maximumWeight == UNSET_INT, "maximum weight was already set to %s",
      this.maximumWeight);
  checkState(this.weigher == null, "maximum size can not be combined with weigher");
  checkArgument(size >= 0, "maximum size must not be negative");
  this.maximumSize = size;
  return this;
}

/**
 * Specifies the maximum weight of entries the cache may contain. Weight is determined using the
 * {@link Weigher} specified with {@link #weigher}, and use of this method requires a
 * corresponding call to {@link #weigher} prior to calling {@link #build}.
 *
 * <p>Note that the cache <b>may evict an entry before this limit is exceeded</b>. As the cache
 * size grows close to the maximum, the cache evicts entries that are less likely to be used
 * again. For example, the cache may evict an entry because it hasn't been used recently or very
 * often.
 *
 * <p>When {@code weight} is zero, elements will be evicted immediately after being loaded into
 * cache. This can be useful in testing, or to disable caching temporarily without a code
 * change.
 *
 * <p>Note that weight is only used to determine whether the cache is over capacity; it has no
 * effect on selecting which entry should be evicted next.
 *
 * <p>This feature cannot be used in conjunction with {@link #maximumSize}.
 *
 * @param weight the maximum total weight of entries the cache may contain
 * @throws IllegalArgumentException if {@code weight} is negative
 * @throws IllegalStateException if a maximum weight or size was already set
 * @since 11.0
 */
@GwtIncompatible("To be supported")
public CacheBuilder<K, V> maximumWeight(long weight) {
  checkState(this.maximumWeight == UNSET_INT, "maximum weight was already set to %s",
      this.maximumWeight);
  checkState(this.maximumSize == UNSET_INT, "maximum size was already set to %s",
      this.maximumSize);
  this.maximumWeight = weight;
  checkArgument(weight >= 0, "maximum weight must not be negative");
  return this;
}

/**
 * Specifies the weigher to use in determining the weight of entries. Entry weight is taken
 * into consideration by {@link #maximumWeight(long)} when determining which entries to evict, and
 * use of this method requires a corresponding call to {@link #maximumWeight(long)} prior to
 * calling {@link #build}. Weights are measured and recorded when entries are inserted into the
 * cache, and are thus effectively static during the lifetime of a cache entry.
 *
 * <p>When the weight of an entry is zero it will not be considered for size-based eviction
 * (though it still may be evicted by other means).
 *
 * <p><b>Important note:</b> Instead of returning <em>this</em> as a {@code CacheBuilder}
 * instance, this method returns {@code CacheBuilder<K1, V1>}. From this point on, either the
 * original reference or the returned reference may be used to complete configuration and build
 * the cache, but only the "generic" one is type-safe. That is, it will properly prevent you from
 * building caches whose key or value types are incompatible with the types accepted by the
 * weigher already provided; the {@code CacheBuilder} type cannot do this. For best results,
 * simply use the standard method-chaining idiom, as illustrated in the documentation at top,
 * configuring a {@code CacheBuilder} and building your {@link Cache} all in a single statement.
 *
 * <p><b>Warning:</b> if you ignore the above advice, and use this {@code CacheBuilder} to build
 * a cache whose key or value type is incompatible with the weigher, you will likely experience
 * a {@link ClassCastException} at some <i>undefined</i> point in the future.
 *
 * @param weigher the weigher to use in calculating the weight of cache entries
 * @throws IllegalArgumentException if {@code size} is negative
 * @throws IllegalStateException if a maximum size was already set
 * @since 11.0
 */
@GwtIncompatible("To be supported")
public <K1 extends K, V1 extends V> CacheBuilder<K1, V1> weigher(
    Weigher<? super K1, ? super V1> weigher) {
  checkState(this.weigher == null);
  if (strictParsing) {
    checkState(this.maximumSize == UNSET_INT, "weigher can not be combined with maximum size",
        this.maximumSize);
  }

  // safely limiting the kinds of caches this can produce
  @SuppressWarnings("unchecked")
  CacheBuilder<K1, V1> me = (CacheBuilder<K1, V1>) this;
  me.weigher = checkNotNull(weigher);
  return me;
}

long getMaximumWeight() {
  // A zero expiration duration means every entry is evicted immediately after
  // load (see expireAfterWrite/expireAfterAccess javadoc), so the effective
  // maximum weight is zero regardless of any configured size or weight.
  if (expireAfterWriteNanos == 0 || expireAfterAccessNanos == 0) {
    return 0;
  }
  // When no weigher is set, each entry weighs 1 and maximumSize acts as the weight cap.
  return (weigher == null) ? maximumSize : maximumWeight;
}

// Make a safe contravariant cast now so we don't have to do it over and over.
@SuppressWarnings("unchecked")
<K1 extends K, V1 extends V> Weigher<K1, V1> getWeigher() {
  return (Weigher<K1, V1>) Objects.firstNonNull(weigher, OneWeigher.INSTANCE);
}

/**
 * Specifies that each key (not value) stored in the cache should be wrapped in a {@link
 * WeakReference} (by default, strong references are used).
*
 * <p><b>Warning:</b> when this method is used, the resulting cache will use identity ({@code ==})
 * comparison to determine equality of keys.
 *
 * <p>Entries with keys that have been garbage collected may be counted in {@link Cache#size},
 * but will never be visible to read or write operations; such entries are cleaned up as part of
 * the routine maintenance described in the class javadoc.
 *
 * @throws IllegalStateException if the key strength was already set
 */
@GwtIncompatible("java.lang.ref.WeakReference")
public CacheBuilder<K, V> weakKeys() {
  return setKeyStrength(Strength.WEAK);
}

CacheBuilder<K, V> setKeyStrength(Strength strength) {
  checkState(keyStrength == null, "Key strength was already set to %s", keyStrength);
  keyStrength = checkNotNull(strength);
  return this;
}

Strength getKeyStrength() {
  return firstNonNull(keyStrength, Strength.STRONG);
}

/**
 * Specifies that each value (not key) stored in the cache should be wrapped in a
 * {@link WeakReference} (by default, strong references are used).
 *
 * <p>Weak values will be garbage collected once they are weakly reachable. This makes them a poor
 * candidate for caching; consider {@link #softValues} instead.
 *
 * <p><b>Note:</b> when this method is used, the resulting cache will use identity ({@code ==})
 * comparison to determine equality of values.
 *
 * <p>Entries with values that have been garbage collected may be counted in {@link Cache#size},
 * but will never be visible to read or write operations; such entries are cleaned up as part of
 * the routine maintenance described in the class javadoc.
 *
 * @throws IllegalStateException if the value strength was already set
 */
@GwtIncompatible("java.lang.ref.WeakReference")
public CacheBuilder<K, V> weakValues() {
  return setValueStrength(Strength.WEAK);
}

/**
 * Specifies that each value (not key) stored in the cache should be wrapped in a
 * {@link SoftReference} (by default, strong references are used). Softly-referenced objects will
 * be garbage-collected in a <i>globally</i> least-recently-used manner, in response to memory
 * demand.
 *
 * <p><b>Warning:</b> in most circumstances it is better to set a per-cache {@linkplain
 * #maximumSize(long) maximum size} instead of using soft references. You should only use this
 * method if you are well familiar with the practical consequences of soft references.
 *
 * <p><b>Note:</b> when this method is used, the resulting cache will use identity ({@code ==})
 * comparison to determine equality of values.
 *
 * <p>Entries with values that have been garbage collected may be counted in {@link Cache#size},
 * but will never be visible to read or write operations; such entries are cleaned up as part of
 * the routine maintenance described in the class javadoc.
 *
 * @throws IllegalStateException if the value strength was already set
 */
@GwtIncompatible("java.lang.ref.SoftReference")
public CacheBuilder<K, V> softValues() {
  return setValueStrength(Strength.SOFT);
}

CacheBuilder<K, V> setValueStrength(Strength strength) {
  checkState(valueStrength == null, "Value strength was already set to %s", valueStrength);
  valueStrength = checkNotNull(strength);
  return this;
}

Strength getValueStrength() {
  return firstNonNull(valueStrength, Strength.STRONG);
}

/**
 * Specifies that each entry should be automatically removed from the cache once a fixed duration
 * has elapsed after the entry's creation, or the most recent replacement of its value.
 *
 * <p>When {@code duration} is zero, this method hands off to
 * {@link #maximumSize(long) maximumSize}{@code (0)}, ignoring any otherwise-specified maximum
 * size or weight. This can be useful in testing, or to disable caching temporarily without a code
 * change.
 *
 * <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
 * write operations. Expired entries are cleaned up as part of the routine maintenance described
 * in the class javadoc.
 *
 * @param duration the length of time after an entry is created that it should be automatically
 *     removed
 * @param unit the unit that {@code duration} is expressed in
 * @throws IllegalArgumentException if {@code duration} is negative
 * @throws IllegalStateException if the time to live or time to idle was already set
 */
public CacheBuilder<K, V> expireAfterWrite(long duration, TimeUnit unit) {
  checkState(expireAfterWriteNanos == UNSET_INT, "expireAfterWrite was already set to %s ns",
      expireAfterWriteNanos);
  checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
  this.expireAfterWriteNanos = unit.toNanos(duration);
  return this;
}

long getExpireAfterWriteNanos() {
  return (expireAfterWriteNanos == UNSET_INT) ? DEFAULT_EXPIRATION_NANOS : expireAfterWriteNanos;
}

/**
 * Specifies that each entry should be automatically removed from the cache once a fixed duration
 * has elapsed after the entry's creation, the most recent replacement of its value, or its last
 * access. Access time is reset by all cache read and write operations (including
 * {@code Cache.asMap().get(Object)} and {@code Cache.asMap().put(K, V)}), but not by operations
 * on the collection-views of {@link Cache#asMap}.
 *
 * <p>When {@code duration} is zero, this method hands off to
 * {@link #maximumSize(long) maximumSize}{@code (0)}, ignoring any otherwise-specified maximum
 * size or weight. This can be useful in testing, or to disable caching temporarily without a code
 * change.
 *
 * <p>Expired entries may be counted in {@link Cache#size}, but will never be visible to read or
 * write operations. Expired entries are cleaned up as part of the routine maintenance described
 * in the class javadoc.
 *
 * @param duration the length of time after an entry is last accessed that it should be
 *     automatically removed
 * @param unit the unit that {@code duration} is expressed in
 * @throws IllegalArgumentException if {@code duration} is negative
 * @throws IllegalStateException if the time to idle or time to live was already set
 */
public CacheBuilder<K, V> expireAfterAccess(long duration, TimeUnit unit) {
  checkState(expireAfterAccessNanos == UNSET_INT, "expireAfterAccess was already set to %s ns",
      expireAfterAccessNanos);
  checkArgument(duration >= 0, "duration cannot be negative: %s %s", duration, unit);
  this.expireAfterAccessNanos = unit.toNanos(duration);
  return this;
}

long getExpireAfterAccessNanos() {
  return (expireAfterAccessNanos == UNSET_INT)
      ? DEFAULT_EXPIRATION_NANOS : expireAfterAccessNanos;
}

/**
 * Specifies that active entries are eligible for automatic refresh once a fixed duration has
 * elapsed after the entry's creation, or the most recent replacement of its value. The semantics
 * of refreshes are specified in {@link LoadingCache#refresh}, and are performed by calling
 * {@link CacheLoader#reload}.
 *
 * <p>As the default implementation of {@link CacheLoader#reload} is synchronous, it is
 * recommended that users of this method override {@link CacheLoader#reload} with an asynchronous
 * implementation; otherwise refreshes will be performed during unrelated cache read and write
 * operations.
 *
 * <p>Currently automatic refreshes are performed when the first stale request for an entry
 * occurs. The request triggering refresh will make a blocking call to {@link CacheLoader#reload}
 * and immediately return the new value if the returned future is complete, and the old value
 * otherwise.
 *
 * <p><b>Note:</b> <i>all exceptions thrown during refresh will be logged and then swallowed</i>.
 *
 * @param duration the length of time after an entry is created that it should be considered
 *     stale, and thus eligible for refresh
 * @param unit the unit that {@code duration} is expressed in
 * @throws IllegalArgumentException if {@code duration} is negative
 * @throws IllegalStateException if the refresh interval was already set
 * @since 11.0
 */
@Beta
@GwtIncompatible("To be supported (synchronously).")
public CacheBuilder<K, V> refreshAfterWrite(long duration, TimeUnit unit) {
  checkNotNull(unit);
  checkState(refreshNanos == UNSET_INT, "refresh was already set to %s ns", refreshNanos);
  checkArgument(duration > 0, "duration must be positive: %s %s", duration, unit);
  this.refreshNanos = unit.toNanos(duration);
  return this;
}

long getRefreshNanos() {
  return (refreshNanos == UNSET_INT) ? DEFAULT_REFRESH_NANOS : refreshNanos;
}

/**
 * Specifies a nanosecond-precision time source for use in determining when entries should be
 * expired. By default, {@link System#nanoTime} is used.
 *
 * <p>The primary intent of this method is to facilitate testing of caches which have been
 * configured with {@link #expireAfterWrite} or {@link #expireAfterAccess}.
 *
 * @throws IllegalStateException if a ticker was already set
 */
public CacheBuilder<K, V> ticker(Ticker ticker) {
  checkState(this.ticker == null);
  this.ticker = checkNotNull(ticker);
  return this;
}

Ticker getTicker(boolean recordsTime) {
  if (ticker != null) {
    return ticker;
  }
  // NULL_TICKER always reads 0 (see its declaration), so caches that never need
  // a time reading skip System.nanoTime() entirely.
  return recordsTime ? Ticker.systemTicker() : NULL_TICKER;
}

/**
 * Specifies a listener instance, which all caches built using this {@code CacheBuilder} will
 * notify each time an entry is removed from the cache by any means.
 *
 * <p>Each cache built by this {@code CacheBuilder} after this method is called invokes the
 * supplied listener after removing an element for any reason (see removal causes in {@link
 * RemovalCause}). It will invoke the listener as part of the routine maintenance described
 * in the class javadoc.
* * <p><b>Note:</b> <i>all exceptions thrown by {@code listener} will be logged (using * {@link java.util.logging.Logger})and then swallowed</i>. * * <p><b>Important note:</b> Instead of returning <em>this</em> as a {@code CacheBuilder} * instance, this method returns {@code CacheBuilder<K1, V1>}. From this point on, either the * original reference or the returned reference may be used to complete configuration and build * the cache, but only the "generic" one is type-safe. That is, it will properly prevent you from * building caches whose key or value types are incompatible with the types accepted by the * listener already provided; the {@code CacheBuilder} type cannot do this. For best results, * simply use the standard method-chaining idiom, as illustrated in the documentation at top, * configuring a {@code CacheBuilder} and building your {@link Cache} all in a single statement. * * <p><b>Warning:</b> if you ignore the above advice, and use this {@code CacheBuilder} to build * a cache whose key or value type is incompatible with the listener, you will likely experience * a {@link ClassCastException} at some <i>undefined</i> point in the future. * * @throws IllegalStateException if a removal listener was already set */ @CheckReturnValue public <K1 extends K, V1 extends V> CacheBuilder<K1, V1> removalListener( RemovalListener<? super K1, ? super V1> listener) { checkState(this.removalListener == null); // safely limiting the kinds of caches this can produce @SuppressWarnings("unchecked") CacheBuilder<K1, V1> me = (CacheBuilder<K1, V1>) this; me.removalListener = checkNotNull(listener); return me; } // Make a safe contravariant cast now so we don't have to do it over and over. @SuppressWarnings("unchecked") <K1 extends K, V1 extends V> RemovalListener<K1, V1> getRemovalListener() { return (RemovalListener<K1, V1>) Objects.firstNonNull(removalListener, NullListener.INSTANCE); } /** * Enable the accumulation of {@link CacheStats} during the operation of the cache. 
Without this * {@link Cache#stats} will return zero for all statistics. Note that recording stats requires * bookkeeping to be performed with each operation, and thus imposes a performance penalty on * cache operation. * * @since 12.0 (previously, stats collection was automatic) */ public CacheBuilder<K, V> recordStats() { statsCounterSupplier = CACHE_STATS_COUNTER; return this; } Supplier<? extends StatsCounter> getStatsCounterSupplier() { return statsCounterSupplier; } /** * Builds a cache, which either returns an already-loaded value for a given key or atomically * computes or retrieves it using the supplied {@code CacheLoader}. If another thread is currently * loading the value for this key, simply waits for that thread to finish and returns its * loaded value. Note that multiple threads can concurrently load values for distinct keys. * * <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be * invoked again to create multiple independent caches. * * @param loader the cache loader used to obtain new values * @return a cache having the requested features */ public <K1 extends K, V1 extends V> LoadingCache<K1, V1> build( CacheLoader<? super K1, V1> loader) { checkWeightWithWeigher(); return new LocalCache.LocalLoadingCache<K1, V1>(this, loader); } /** * Builds a cache which does not automatically load values when keys are requested. * * <p>Consider {@link #build(CacheLoader)} instead, if it is feasible to implement a * {@code CacheLoader}. * * <p>This method does not alter the state of this {@code CacheBuilder} instance, so it can be * invoked again to create multiple independent caches. 
* * @return a cache having the requested features * @since 11.0 */ public <K1 extends K, V1 extends V> Cache<K1, V1> build() { checkWeightWithWeigher(); checkNonLoadingCache(); return new LocalCache.LocalManualCache<K1, V1>(this); } private void checkNonLoadingCache() { checkState(refreshNanos == UNSET_INT, "refreshAfterWrite requires a LoadingCache"); } private void checkWeightWithWeigher() { if (weigher == null) { checkState(maximumWeight == UNSET_INT, "maximumWeight requires weigher"); } else { if (strictParsing) { checkState(maximumWeight != UNSET_INT, "weigher requires maximumWeight"); } else { if (maximumWeight == UNSET_INT) { logger.log(Level.WARNING, "ignoring weigher specified without maximumWeight"); } } } } /** * Returns a string representation for this CacheBuilder instance. The exact form of the returned * string is not specified. */ @Override public String toString() { Objects.ToStringHelper s = Objects.toStringHelper(this); if (initialCapacity != UNSET_INT) { s.add("initialCapacity", initialCapacity); } if (concurrencyLevel != UNSET_INT) { s.add("concurrencyLevel", concurrencyLevel); } if (maximumWeight != UNSET_INT) { if (weigher == null) { s.add("maximumSize", maximumWeight); } else { s.add("maximumWeight", maximumWeight); } } if (expireAfterWriteNanos != UNSET_INT) { s.add("expireAfterWrite", expireAfterWriteNanos + "ns"); } if (expireAfterAccessNanos != UNSET_INT) { s.add("expireAfterAccess", expireAfterAccessNanos + "ns"); } if (keyStrength != null) { s.add("keyStrength", Ascii.toLowerCase(keyStrength.toString())); } if (valueStrength != null) { s.add("valueStrength", Ascii.toLowerCase(valueStrength.toString())); } if (keyEquivalence != null) { s.addValue("keyEquivalence"); } if (valueEquivalence != null) { s.addValue("valueEquivalence"); } if (removalListener != null) { s.addValue("removalListener"); } return s.toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.data; import org.apache.flink.table.data.binary.TypedSetters; import org.apache.flink.table.data.vector.ArrayColumnVector; import org.apache.flink.table.data.vector.BooleanColumnVector; import org.apache.flink.table.data.vector.ByteColumnVector; import org.apache.flink.table.data.vector.BytesColumnVector; import org.apache.flink.table.data.vector.ColumnVector; import org.apache.flink.table.data.vector.DecimalColumnVector; import org.apache.flink.table.data.vector.DoubleColumnVector; import org.apache.flink.table.data.vector.FloatColumnVector; import org.apache.flink.table.data.vector.IntColumnVector; import org.apache.flink.table.data.vector.LongColumnVector; import org.apache.flink.table.data.vector.ShortColumnVector; import org.apache.flink.table.data.vector.TimestampColumnVector; import java.util.Arrays; /** * Columnar array to support access to vector column data. 
*/ public final class ColumnarArrayData implements ArrayData, TypedSetters { private final ColumnVector data; private final int offset; private final int numElements; public ColumnarArrayData(ColumnVector data, int offset, int numElements) { this.data = data; this.offset = offset; this.numElements = numElements; } @Override public int size() { return numElements; } @Override public boolean isNullAt(int pos) { return data.isNullAt(offset + pos); } @Override public void setNullAt(int pos) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public boolean getBoolean(int pos) { return ((BooleanColumnVector) data).getBoolean(offset + pos); } @Override public byte getByte(int pos) { return ((ByteColumnVector) data).getByte(offset + pos); } @Override public short getShort(int pos) { return ((ShortColumnVector) data).getShort(offset + pos); } @Override public int getInt(int pos) { return ((IntColumnVector) data).getInt(offset + pos); } @Override public long getLong(int pos) { return ((LongColumnVector) data).getLong(offset + pos); } @Override public float getFloat(int pos) { return ((FloatColumnVector) data).getFloat(offset + pos); } @Override public double getDouble(int pos) { return ((DoubleColumnVector) data).getDouble(offset + pos); } @Override public StringData getString(int pos) { BytesColumnVector.Bytes byteArray = getByteArray(pos); return StringData.fromBytes(byteArray.data, byteArray.offset, byteArray.len); } @Override public DecimalData getDecimal(int pos, int precision, int scale) { return ((DecimalColumnVector) data).getDecimal(offset + pos, precision, scale); } @Override public TimestampData getTimestamp(int pos, int precision) { return ((TimestampColumnVector) data).getTimestamp(offset + pos, precision); } @Override public <T> RawValueData<T> getRawValue(int pos) { throw new UnsupportedOperationException("RawValueData is not supported."); } @Override public byte[] getBinary(int pos) { BytesColumnVector.Bytes byteArray = 
getByteArray(pos); if (byteArray.len == byteArray.data.length) { return byteArray.data; } else { return Arrays.copyOfRange(byteArray.data, byteArray.offset, byteArray.len); } } @Override public ArrayData getArray(int pos) { return ((ArrayColumnVector) data).getArray(offset + pos); } @Override public MapData getMap(int pos) { throw new UnsupportedOperationException("Map is not supported."); } @Override public RowData getRow(int pos, int numFields) { throw new UnsupportedOperationException("Row is not supported."); } @Override public void setBoolean(int pos, boolean value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setByte(int pos, byte value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setShort(int pos, short value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setInt(int pos, int value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setLong(int pos, long value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setFloat(int pos, float value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setDouble(int pos, double value) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setDecimal(int pos, DecimalData value, int precision) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public void setTimestamp(int pos, TimestampData value, int precision) { throw new UnsupportedOperationException("Not support the operation!"); } @Override public boolean[] toBooleanArray() { boolean[] res = new boolean[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getBoolean(i); } return res; } @Override public byte[] toByteArray() { byte[] res = new byte[numElements]; for (int i = 0; i < numElements; 
i++) { res[i] = getByte(i); } return res; } @Override public short[] toShortArray() { short[] res = new short[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getShort(i); } return res; } @Override public int[] toIntArray() { int[] res = new int[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getInt(i); } return res; } @Override public long[] toLongArray() { long[] res = new long[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getLong(i); } return res; } @Override public float[] toFloatArray() { float[] res = new float[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getFloat(i); } return res; } @Override public double[] toDoubleArray() { double[] res = new double[numElements]; for (int i = 0; i < numElements; i++) { res[i] = getDouble(i); } return res; } private BytesColumnVector.Bytes getByteArray(int pos) { return ((BytesColumnVector) data).getBytes(offset + pos); } }
package org.rabix.bindings.draft3.resolver;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URL;
import java.net.URLConnection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.rabix.bindings.BindingException;
import org.rabix.bindings.BindingWrongVersionException;
import org.rabix.bindings.ProtocolType;
import org.rabix.bindings.helper.URIHelper;
import org.rabix.common.helper.JSONHelper;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;

/**
 * Resolves a CWL draft-3 document: inlines {@code $import}/{@code $include} references,
 * expands namespaced values, extracts {@code $graph} fragments, and verifies the
 * {@code cwlVersion}, returning the fully resolved document as a JSON string.
 *
 * <p>NOTE(review): all state here is static and mutable ({@code graphResolve},
 * {@code namespaces}, the per-URL caches). {@code resolve} is not safe to call concurrently
 * for the same URL — confirm callers serialize access.
 */
public class Draft3DocumentResolver {

  // CWL primitive/built-in type names; a textual "type" value NOT in this set is treated
  // as a reference to a schema definition (see isTypeReference).
  public static Set<String> types = new HashSet<String>();

  static {
    types.add("null");
    types.add("boolean");
    types.add("int");
    types.add("long");
    types.add("float");
    types.add("double");
    types.add("string");
    types.add("File");
    types.add("record");
    types.add("enum");
    types.add("array");
    types.add("Any");
  }

  public static final String APP_STEP_KEY = "run";
  public static final String TYPE_KEY = "type";
  public static final String RESOLVER_REFERENCE_KEY = "$import";
  public static final String RESOLVER_REFERENCE_INCLUDE_KEY = "$include";
  public static final String GRAPH_KEY = "$graph";
  public static final String SCHEMA_KEY = "$schemas";
  public static final String NAMESPACES_KEY = "$namespaces";
  public static final String SCHEMADEF_KEY = "SchemaDefRequirement";
  public static final String CWL_VERSION_KEY = "cwlVersion";
  public static final String RESOLVER_JSON_POINTER_KEY = "$job";

  public static final String DOCUMENT_FRAGMENT_SEPARATOR = "#";

  private static final String DEFAULT_ENCODING = "UTF-8";

  // Set while resolving a $graph document; cleared at the end of resolve().
  private static boolean graphResolve = false;

  // Prefix -> expansion mappings collected from the document's $namespaces block.
  private static Map<String, String> namespaces = new HashMap<String, String>();

  // Per-appUrl caches, populated during traversal and cleared before resolve() returns.
  private static Map<String, Map<String, Draft3DocumentResolverReference>> referenceCache = new HashMap<>();
  private static Map<String, LinkedHashSet<Draft3DocumentResolverReplacement>> replacements = new HashMap<>();

  /**
   * Resolves the document identified by {@code appUrl} and returns the resolved JSON text.
   *
   * @throws BindingException if the document cannot be read, contains circular references,
   *     or is not a draft-3 document
   */
  public static String resolve(String appUrl) throws BindingException {
    String appUrlBase = appUrl;
    try {
      URI uri = URI.create(appUrl);
      if (uri.getScheme().equals(URIHelper.DATA_URI_SCHEME)) {
        appUrlBase = URIHelper.extractBase(appUrl);
      }
    } catch (IllegalArgumentException e) {
      // appUrl is not a parseable URI; deliberately fall through and use it as-is.
    }

    File file = null;
    JsonNode root = null;
    try {
      boolean isFile = URIHelper.isFile(appUrlBase);
      if (isFile) {
        file = new File(URIHelper.getURIInfo(appUrlBase));
      } else {
        // Non-file sources resolve relative $include/$import paths against the CWD.
        file = new File(".");
      }
      root = JSONHelper.readJsonNode(URIHelper.getData(appUrlBase));
    } catch (IOException e) {
      throw new BindingException(e);
    }

    if(root.has(GRAPH_KEY)) {
      graphResolve = true;
    }

    if(root.has(NAMESPACES_KEY)) {
      populateNamespaces(root);
      ((ObjectNode) root).remove(NAMESPACES_KEY);
    }

    JsonNode cwlVersion = root.get(CWL_VERSION_KEY);
    if (cwlVersion == null || !(cwlVersion.asText().equals(ProtocolType.DRAFT3.appVersion))) {
      // Clean up per-URL state before bailing so a retry starts fresh.
      clearReplacements(appUrl);
      clearReferenceCache(appUrl);
      throw new BindingWrongVersionException("Document version is not " + ProtocolType.DRAFT3.appVersion);
    }

    // First pass: collect references and queued replacements, then apply them.
    traverse(appUrl, root, file, null, root);

    for (Draft3DocumentResolverReplacement replacement : getReplacements(appUrl)) {
      if (replacement.getParentNode().isArray()) {
        replaceArrayItem(appUrl, root, replacement);
      } else if (replacement.getParentNode().isObject()) {
        replaceObjectItem(appUrl, root, replacement);
      }
    }

    if (root.has(SCHEMA_KEY)) {
      throw new NotImplementedException("Feature not implemented");
    }

    if(graphResolve) {
      // Second pass for $graph documents: strip fragment identifiers, then extract the
      // graph entry whose id matches the URL fragment.
      String fragment = URIHelper.extractFragment(appUrl).substring(1);

      clearReplacements(appUrl);
      clearReferenceCache(appUrl);
      removeFragmentIdentifier(appUrl, root, file, null, root, fragment);

      for (Draft3DocumentResolverReplacement replacement : getReplacements(appUrl)) {
        if (replacement.getParentNode().isArray()) {
          replaceArrayItem(appUrl, root, replacement);
        } else if (replacement.getParentNode().isObject()) {
          replaceObjectItem(appUrl, root, replacement);
        }
      }

      for (final JsonNode elem : root.get(GRAPH_KEY)) {
        if (elem.get("id").asText().equals(fragment)) {
          Map<String, Object> result = JSONHelper.readMap(elem);
          // The extracted fragment does not carry cwlVersion itself; copy it over.
          result.put(CWL_VERSION_KEY, cwlVersion);
          root = JSONHelper.convertToJsonNode(result);
          break;
        }
      }
      graphResolve = false;
    } else {
      if (!(root.get(CWL_VERSION_KEY).asText().equals(ProtocolType.DRAFT3.appVersion))) {
        clearReplacements(appUrl);
        clearReferenceCache(appUrl);
        throw new BindingException("Document version is not cwl:draft-3");
      }
    }

    clearReplacements(appUrl);
    clearReferenceCache(appUrl);
    return JSONHelper.writeObject(root);
  }

  /** Copies the document's $namespaces entries into the static prefix map. */
  private static void populateNamespaces(JsonNode root) {
    Iterator<Entry<String, JsonNode>> fieldIterator = root.get(NAMESPACES_KEY).fields();
    while (fieldIterator.hasNext()) {
      Entry<String, JsonNode> fieldEntry = fieldIterator.next();
      namespaces.put(fieldEntry.getKey(), fieldEntry.getValue().asText());
    }
  }

  /**
   * Depth-first traversal that resolves $include/$import/run/type/$job references. Returns the
   * resolved node for reference nodes, {@code null} for $include leaves (already queued as a
   * replacement), and the node itself otherwise.
   *
   * @throws BindingException on circular references or unreadable referenced documents
   */
  private static JsonNode traverse(String appUrl, JsonNode root, File file, JsonNode parentNode, JsonNode currentNode) throws BindingException {
    Preconditions.checkNotNull(currentNode, "current node id is null");

    boolean isInclude = currentNode.has(RESOLVER_REFERENCE_INCLUDE_KEY);
    if (isInclude) {
      // $include: substitute the raw text contents of the referenced file.
      String path = currentNode.get(RESOLVER_REFERENCE_INCLUDE_KEY).textValue();
      String content = loadContents(file, path);

      Draft3DocumentResolverReference reference = new Draft3DocumentResolverReference(false, new TextNode(content));
      getReferenceCache(appUrl).put(path, reference);
      getReplacements(appUrl).add(new Draft3DocumentResolverReplacement(parentNode, currentNode, path));
      return null;
    }

    namespace(currentNode);

    boolean isReference = currentNode.has(RESOLVER_REFERENCE_KEY);
    boolean appReference = currentNode.has(APP_STEP_KEY) && currentNode.get(APP_STEP_KEY).isTextual();
    boolean typeReference = currentNode.has(TYPE_KEY) && currentNode.get(TYPE_KEY).isTextual() && isTypeReference(currentNode.get(TYPE_KEY).textValue());
    boolean isJsonPointer = currentNode.has(RESOLVER_JSON_POINTER_KEY) && parentNode != null; // we skip the first level $job

    if (isReference || isJsonPointer || appReference || typeReference) {
      String referencePath = null;
      if (isReference) {
        referencePath = currentNode.get(RESOLVER_REFERENCE_KEY).textValue();
      } else if (appReference) {
        referencePath = currentNode.get(APP_STEP_KEY).textValue();
      } else if(typeReference) {
        referencePath = currentNode.get(TYPE_KEY).textValue();
      } else {
        referencePath = currentNode.get(RESOLVER_JSON_POINTER_KEY).textValue();
      }

      Draft3DocumentResolverReference reference = getReferenceCache(appUrl).get(referencePath);
      if (reference != null) {
        // A cached entry still marked "resolving" means we re-entered it on this path.
        if (reference.isResolving()) {
          throw new BindingException("Circular dependency detected!");
        }
      } else {
        // Mark as in-flight before recursing so cycles are detected.
        reference = new Draft3DocumentResolverReference();
        reference.setResolving(true);
        getReferenceCache(appUrl).put(referencePath, reference);

        JsonNode referenceDocumentRoot = findDocumentRoot(root, file, referencePath, isJsonPointer);
        ParentChild parentChild = findReferencedNode(referenceDocumentRoot, referencePath);
        JsonNode resolvedNode = traverse(appUrl, root, file, parentChild.parent, parentChild.child);
        if(resolvedNode == null) {
          return null;
        }

        reference.setResolvedNode(resolvedNode);
        reference.setResolving(false);
        getReferenceCache(appUrl).put(referencePath, reference);
      }
      // run/type references replace only that field's value; $import/$job replace the node.
      if(appReference) {
        getReplacements(appUrl).add(new Draft3DocumentResolverReplacement(currentNode, currentNode.get("run"), referencePath));
      } else if(typeReference) {
        getReplacements(appUrl).add(new Draft3DocumentResolverReplacement(currentNode, currentNode.get("type"), referencePath));
      } else {
        getReplacements(appUrl).add(new Draft3DocumentResolverReplacement(parentNode, currentNode, referencePath));
      }
      return reference.getResolvedNode();
    } else if (currentNode.isContainerNode()) {
      for (JsonNode subnode : currentNode) {
        traverse(appUrl, root, file, currentNode, subnode);
      }
    }
    return currentNode;
  }

  /**
   * Rewrites textual field values of the form {@code prefix:rest} to {@code expansion + rest}
   * for every prefix registered in {@link #namespaces}. Mutates {@code currentNode} in place.
   */
  private static void namespace(JsonNode currentNode) {
    Iterator<Entry<String, JsonNode>> fieldIterator = currentNode.fields();
    while (fieldIterator.hasNext()) {
      Entry<String, JsonNode> fieldEntry = fieldIterator.next();
      if(fieldEntry.getValue().isTextual() && namespaces.keySet().contains(fieldEntry.getValue().asText().split(":")[0])) {
        String prefix = namespaces.get(fieldEntry.getValue().asText().split(":")[0]);
        String namespacedValue = fieldEntry.getValue().asText().replace(fieldEntry.getValue().asText().split(":")[0] + ":", prefix);
        ((ObjectNode) currentNode).put(fieldEntry.getKey(), namespacedValue);
      }
    }
  }

  // A "type" value is a reference exactly when it is not one of the built-in CWL type names.
  private static boolean isTypeReference(String type) {
    if(types.contains(type)) {
      return false;
    }
    return true;
  }

  /**
   * Swaps a referenced object field for its resolved node. The reference node is located by
   * equality against the parent's field values, removed, then re-put under the same field name.
   */
  @SuppressWarnings("deprecation")
  private static void replaceObjectItem(String appUrl, JsonNode root, Draft3DocumentResolverReplacement replacement) throws BindingException {
    JsonNode parent = replacement.getParentNode() == null ? root : replacement.getParentNode();

    Iterator<Entry<String, JsonNode>> fieldIterator = parent.fields();
    String fieldName = null;
    while (fieldIterator.hasNext()) {
      Entry<String, JsonNode> fieldEntry = fieldIterator.next();
      if (fieldEntry.getValue().equals(replacement.getReferenceNode())) {
        fieldName = fieldEntry.getKey();
        fieldIterator.remove();
        break;
      }
    }
    Draft3DocumentResolverReference reference = getReferenceCache(appUrl).get(replacement.getNormalizedReferencePath());
    if (reference != null) {
      ((ObjectNode) parent).put(fieldName, reference.getResolvedNode());
    } else {
      throw new BindingException("Cannot find reference " + replacement.getNormalizedReferencePath());
    }
  }

  /**
   * Swaps a referenced array element for its resolved node. Note the resolved node is appended,
   * not re-inserted at the removed element's index.
   */
  private static void replaceArrayItem(String appUrl, JsonNode root, Draft3DocumentResolverReplacement replacement) throws BindingException {
    JsonNode parent = replacement.getParentNode() == null ? root : replacement.getParentNode();

    Iterator<JsonNode> nodeIterator = parent.elements();
    while (nodeIterator.hasNext()) {
      JsonNode subnode = nodeIterator.next();
      if (subnode.equals(replacement.getReferenceNode())) {
        nodeIterator.remove();
        break;
      }
    }
    if (parent.isArray()) {
      Draft3DocumentResolverReference reference = getReferenceCache(appUrl).get(replacement.getNormalizedReferencePath());
      if (reference != null) {
        ((ArrayNode) parent).add(reference.getResolvedNode());
      } else {
        throw new BindingException("Cannot find reference " + replacement.getNormalizedReferencePath());
      }
    }
  }

  /**
   * Returns the document root the reference should be resolved against: the current document
   * for fragment-only references ("#..."), otherwise the parsed contents of the referenced file.
   */
  private static JsonNode findDocumentRoot(JsonNode root, File file, String reference, boolean isJsonPointer) throws BindingException {
    JsonNode startNode = root;
    if (isJsonPointer) {
      startNode = startNode.get(RESOLVER_JSON_POINTER_KEY);
    }
    int start = reference.indexOf(DOCUMENT_FRAGMENT_SEPARATOR);

    if (start == 0) {
      // Reference within the current document.
      return startNode;
    } else {
      String[] parts = reference.split(DOCUMENT_FRAGMENT_SEPARATOR);
      if (parts.length > 2) {
        throw new BindingException("Invalid reference " + reference);
      }
      String contents = loadContents(file, parts[0]);
      try {
        return JSONHelper.readJsonNode(contents);
      } catch (Exception e) {
        throw new BindingException(e);
      }
    }
  }

  /**
   * Loads referenced contents from an HTTP(S) URL or from a path relative to {@code file}'s
   * parent directory.
   *
   * <p>NOTE(review): the HTTP branch concatenates lines without newline separators and reads
   * with the platform default charset (InputStreamReader without an explicit charset), while
   * the file branch uses UTF-8 — confirm whether that asymmetry is intentional.
   */
  private static String loadContents(File file, String path) throws BindingException {
    if (path.startsWith("http")) {
      try {
        URL website = new URL(path);
        URLConnection connection = website.openConnection();
        BufferedReader in = null;
        try {
          in = new BufferedReader(new InputStreamReader(connection.getInputStream()));
          StringBuilder response = new StringBuilder();
          String inputLine;
          while ((inputLine = in.readLine()) != null) {
            response.append(inputLine);
          }
          return response.toString();
        } finally {
          if (in != null) {
            in.close();
          }
        }
      } catch (Exception e) {
        throw new BindingException("Couldn't fetch contents from " + path);
      }
    } else {
      try {
        String filePath = new File(file.getParentFile(), path).getCanonicalPath();
        return FileUtils.readFileToString(new File(filePath), DEFAULT_ENCODING);
      } catch (IOException e) {
        throw new BindingException("Couldn't fetch contents from " + path);
      }
    }
  }

  /**
   * Locates the node addressed by {@code absolutePath} inside {@code rootNode}, handling three
   * layouts: a {@code $graph} document (match by "id"), a SchemaDefRequirement document (match
   * by "name" within "types"), or a plain path walk over the fragment's "/"-separated parts.
   */
  private static ParentChild findReferencedNode(JsonNode rootNode, String absolutePath) {
    if (!absolutePath.contains(DOCUMENT_FRAGMENT_SEPARATOR)) {
      return new ParentChild(null, rootNode);
    }
    String subpath = absolutePath.substring(absolutePath.indexOf(DOCUMENT_FRAGMENT_SEPARATOR) + 1);
    String[] parts = subpath.split("/");

    if(rootNode.has("$graph")) {
      JsonNode objects = rootNode.get("$graph");
      JsonNode child = null;
      JsonNode parent = objects;
      for(final JsonNode elem: objects) {
        if(elem.get("id").asText().equals(parts[0])) {
          child = elem;
          break;
        }
      }
      return new ParentChild(parent, child);
    } else if (rootNode.has("class") && rootNode.get("class").asText().equals(SCHEMADEF_KEY)) {
      JsonNode objects = rootNode.get("types");
      JsonNode child = null;
      for(final JsonNode elem: objects) {
        if(elem.get("name").asText().equals(parts[0])) {
          child = elem;
          break;
        }
      }
      return new ParentChild(null, child);
    }
    JsonNode parent = null;
    JsonNode child = rootNode;
    for (String part : parts) {
      if (StringUtils.isEmpty(part)) {
        continue;
      }
      parent = child;
      child = child.get(part);
    }
    return new ParentChild(parent, child);
  }

  /**
   * Queues replacements that strip the leading "{fragment}/" from textual "#..." values so
   * intra-graph references become local once the fragment is extracted.
   */
  private static JsonNode removeFragmentIdentifier(String appUrl, JsonNode root, File file, JsonNode parentNode, JsonNode currentNode, String fragment) throws BindingException {
    Preconditions.checkNotNull(currentNode, "current node id is null");
    if(currentNode.isTextual() && currentNode.asText().startsWith(DOCUMENT_FRAGMENT_SEPARATOR)) {
      Draft3DocumentResolverReference reference = new Draft3DocumentResolverReference();
      reference.setResolvedNode(JsonNodeFactory.instance.textNode(currentNode.asText().replace(fragment + "/", "")));
      getReferenceCache(appUrl).put(currentNode.asText(), reference);
      getReplacements(appUrl).add(new Draft3DocumentResolverReplacement(parentNode, currentNode, currentNode.asText()));
    } else if (currentNode.isContainerNode()) {
      for (JsonNode subnode : currentNode) {
        removeFragmentIdentifier(appUrl, root, file, currentNode, subnode, fragment);
      }
    }
    return currentNode;
  }

  // Lazily creates the per-URL replacement set (LinkedHashSet: insertion order matters
  // because replacements are applied in the order they were queued).
  private synchronized static Set<Draft3DocumentResolverReplacement> getReplacements(String url) {
    LinkedHashSet<Draft3DocumentResolverReplacement> replacementsPerUrl = replacements.get(url);
    if (replacementsPerUrl == null) {
      replacementsPerUrl = new LinkedHashSet<Draft3DocumentResolverReplacement>();
      replacements.put(url, replacementsPerUrl);
    }
    return replacementsPerUrl;
  }

  private synchronized static void clearReplacements(String url) {
    replacements.remove(url);
  }

  // Lazily creates the per-URL reference cache.
  private synchronized static Map<String, Draft3DocumentResolverReference> getReferenceCache(String url) {
    Map<String, Draft3DocumentResolverReference> referenceCachePerUrl = referenceCache.get(url);
    if (referenceCachePerUrl == null) {
      referenceCachePerUrl = new HashMap<String, Draft3DocumentResolverReference>();
      referenceCache.put(url, referenceCachePerUrl);
    }
    return referenceCachePerUrl;
  }

  private synchronized static void clearReferenceCache(String url) {
    referenceCache.remove(url);
  }

  // Simple pair of a node and its parent, used to locate and later replace referenced nodes.
  private static class ParentChild {
    JsonNode parent;
    JsonNode child;

    ParentChild(JsonNode parent, JsonNode child) {
      this.parent = parent;
      this.child = child;
    }

    @Override
    public String toString() {
      return "ParentChild [parent=" + parent + ", child=" + child + "]";
    }
  }
}
/*
 * Copyright 2015-2017 Austin Keener & Michael Ritter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.dv8tion.jda.core.entities.impl;

import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.Permission;
import net.dv8tion.jda.core.entities.Channel;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Role;
import net.dv8tion.jda.core.exceptions.PermissionException;
import net.dv8tion.jda.core.managers.RoleManager;
import net.dv8tion.jda.core.managers.RoleManagerUpdatable;
import net.dv8tion.jda.core.requests.Request;
import net.dv8tion.jda.core.requests.Response;
import net.dv8tion.jda.core.requests.RestAction;
import net.dv8tion.jda.core.requests.Route;
import net.dv8tion.jda.core.utils.PermissionUtil;
import org.apache.http.util.Args;

import java.awt.Color;
import java.time.OffsetDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Default {@link Role} implementation backed by mutable fields that are
 * populated/updated from gateway data via the package-level setters.
 *
 * <p>Equality is identity-or-same-id (see {@link #equals(Object)}), so two
 * instances representing the same Discord role compare equal.
 */
public class RoleImpl implements Role
{
    private final long id;
    private final Guild guild;

    // Guards lazy creation of the two manager objects below.
    private final Object mngLock = new Object();
    private volatile RoleManager manager;
    private volatile RoleManagerUpdatable managerUpdatable;

    private String name;
    private Color color;
    private boolean managed;
    private boolean hoisted;
    private boolean mentionable;
    private long rawPermissions;
    private int rawPosition;

    public RoleImpl(long id, Guild guild)
    {
        this.id = id;
        this.guild = guild;
    }

    /**
     * Zero-indexed position of this role in the guild's role list,
     * disregarding the public (@everyone) role, which is reported as -1.
     *
     * @throws RuntimeException if the role is unexpectedly absent from the
     *         guild's role list (indicates inconsistent cache state).
     */
    @Override
    public int getPosition()
    {
        if (this == guild.getPublicRole())
            return -1;

        // Subtract 1 to get into 0-index, and 1 to disregard the everyone role.
        int i = guild.getRoles().size() - 2;
        for (Role r : guild.getRoles())
        {
            if (r == this)
                return i;
            i--;
        }
        throw new RuntimeException("Somehow when determining position we never found the role in the Guild's roles? wtf?");
    }

    /** Raw position value as sent by Discord (not normalized). */
    @Override
    public int getPositionRaw()
    {
        return rawPosition;
    }

    @Override
    public String getName()
    {
        return name;
    }

    /** Whether this role is controlled by an integration and cannot be deleted. */
    @Override
    public boolean isManaged()
    {
        return managed;
    }

    @Override
    public boolean isHoisted()
    {
        return hoisted;
    }

    @Override
    public boolean isMentionable()
    {
        return mentionable;
    }

    /** Raw permission bitfield for this role. */
    @Override
    public long getPermissionsRaw()
    {
        return rawPermissions;
    }

    @Override
    public List<Permission> getPermissions()
    {
        return Collections.unmodifiableList(Permission.getPermissions(rawPermissions));
    }

    @Override
    public Color getColor()
    {
        return color;
    }

    @Override
    public boolean isPublicRole()
    {
        return this.equals(this.getGuild().getPublicRole());
    }

    /**
     * Guild-level permission check: this role's permissions are combined with
     * the public role's permissions before testing each requested bit.
     */
    @Override
    public boolean hasPermission(Permission... permissions)
    {
        long effectivePerms = rawPermissions | guild.getPublicRole().getPermissionsRaw();
        for (Permission perm : permissions)
        {
            if (((effectivePerms >> perm.getOffset()) & 1) != 1)
                return false;
        }
        return true;
    }

    @Override
    public boolean hasPermission(Collection<Permission> permissions)
    {
        Args.notNull(permissions, "Permission Collection");
        return hasPermission(permissions.toArray(new Permission[permissions.size()]));
    }

    /**
     * Channel-level permission check, taking channel overrides into account
     * via {@link PermissionUtil#getEffectivePermission(Channel, Role)}.
     */
    @Override
    public boolean hasPermission(Channel channel, Permission... permissions)
    {
        long effectivePerms = PermissionUtil.getEffectivePermission(channel, this);
        for (Permission perm : permissions)
        {
            if (((effectivePerms >> perm.getOffset()) & 1) != 1)
                return false;
        }
        return true;
    }

    @Override
    public boolean hasPermission(Channel channel, Collection<Permission> permissions)
    {
        Args.notNull(permissions, "Permission Collection");
        return hasPermission(channel, permissions.toArray(new Permission[permissions.size()]));
    }

    @Override
    public boolean canInteract(Role role)
    {
        return PermissionUtil.canInteract(this, role);
    }

    @Override
    public Guild getGuild()
    {
        return guild;
    }

    /** Lazily created manager; double-checked locking on {@code mngLock}. */
    @Override
    public RoleManager getManager()
    {
        RoleManager mng = manager;
        if (mng == null)
        {
            synchronized (mngLock)
            {
                mng = manager;
                if (mng == null)
                    mng = manager = new RoleManager(this);
            }
        }
        return mng;
    }

    /** Lazily created updatable manager; same locking scheme as {@link #getManager()}. */
    @Override
    public RoleManagerUpdatable getManagerUpdatable()
    {
        RoleManagerUpdatable mng = managerUpdatable;
        if (mng == null)
        {
            synchronized (mngLock)
            {
                mng = managerUpdatable;
                if (mng == null)
                    mng = managerUpdatable = new RoleManagerUpdatable(this);
            }
        }
        return mng;
    }

    /**
     * Deletes this role.
     *
     * @throws PermissionException if the self member lacks MANAGE_ROLES or
     *         cannot interact with this role (role is at or above the
     *         highest self-role).
     * @throws UnsupportedOperationException if the role is managed by an
     *         integration.
     */
    @Override
    public RestAction<Void> delete()
    {
        if (!PermissionUtil.checkPermission(getGuild(), getGuild().getSelfMember(), Permission.MANAGE_ROLES))
            throw new PermissionException(Permission.MANAGE_ROLES);
        if (!PermissionUtil.canInteract(getGuild().getSelfMember(), this))
            throw new PermissionException("Can't delete role >= highest self-role");
        if (managed)
            throw new UnsupportedOperationException("Cannot delete a Role that is managed. ");

        Route.CompiledRoute route = Route.Roles.DELETE_ROLE.compile(guild.getId(), getId());
        return new RestAction<Void>(getJDA(), route, null)
        {
            @Override
            protected void handleResponse(Response response, Request<Void> request)
            {
                if (response.isOk())
                    request.onSuccess(null);
                else
                    request.onFailure(response);
            }
        };
    }

    @Override
    public JDA getJDA()
    {
        return guild.getJDA();
    }

    @Override
    public String getAsMention()
    {
        return "<@&" + getId() + '>';
    }

    @Override
    public long getIdLong()
    {
        return id;
    }

    @Override
    public boolean equals(Object o)
    {
        if (!(o instanceof Role))
            return false;
        Role oRole = (Role) o;
        return this == oRole || this.getIdLong() == oRole.getIdLong();
    }

    @Override
    public int hashCode()
    {
        return Long.hashCode(id);
    }

    @Override
    public String toString()
    {
        return "R:" + getName() + '(' + id + ')';
    }

    /**
     * Orders roles by raw position; ties are broken by creation time with the
     * NEWER role ranking lower, matching Discord's hierarchy semantics.
     *
     * @throws IllegalArgumentException if the roles belong to different guilds.
     */
    @Override
    public int compareTo(Role r)
    {
        if (this == r)
            return 0;

        if (!this.getGuild().equals(r.getGuild()))
            throw new IllegalArgumentException("Cannot compare roles that aren't from the same guild!");

        if (this.getPositionRaw() != r.getPositionRaw())
            // FIX: use Integer.compare instead of subtraction, which can
            // overflow for extreme raw-position values.
            return Integer.compare(this.getPositionRaw(), r.getPositionRaw());

        OffsetDateTime thisTime = this.getCreationTime();
        OffsetDateTime rTime = r.getCreationTime();

        // We compare the provided role's time to this's time instead of the reverse as one would expect due to how
        // discord deals with hierarchy. The more recent a role was created, the lower its hierarchy ranking when
        // it shares the same position as another role.
        return rTime.compareTo(thisTime);
    }

    // -- Setters -- (package-internal entity updaters; fluent for chaining)

    public RoleImpl setName(String name)
    {
        this.name = name;
        return this;
    }

    public RoleImpl setColor(Color color)
    {
        this.color = color;
        return this;
    }

    public RoleImpl setManaged(boolean managed)
    {
        this.managed = managed;
        return this;
    }

    public RoleImpl setHoisted(boolean hoisted)
    {
        this.hoisted = hoisted;
        return this;
    }

    public RoleImpl setMentionable(boolean mentionable)
    {
        this.mentionable = mentionable;
        return this;
    }

    public RoleImpl setRawPermissions(long rawPermissions)
    {
        this.rawPermissions = rawPermissions;
        return this;
    }

    public RoleImpl setRawPosition(int rawPosition)
    {
        this.rawPosition = rawPosition;
        return this;
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.script.groovy;

import java.nio.charset.StandardCharsets;

import com.google.common.hash.Hashing;
import groovy.lang.Binding;
import groovy.lang.GroovyClassLoader;
import groovy.lang.Script;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.codehaus.groovy.ast.ClassCodeExpressionTransformer;
import org.codehaus.groovy.ast.ClassNode;
import org.codehaus.groovy.ast.expr.ConstantExpression;
import org.codehaus.groovy.ast.expr.Expression;
import org.codehaus.groovy.classgen.GeneratorContext;
import org.codehaus.groovy.control.CompilationFailedException;
import org.codehaus.groovy.control.CompilePhase;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.control.SourceUnit;
import org.codehaus.groovy.control.customizers.CompilationCustomizer;
import org.codehaus.groovy.control.customizers.ImportCustomizer;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.*;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.math.BigDecimal;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.Map;

/**
 * Provides the infrastructure for Groovy as a scripting language for Elasticsearch.
 *
 * <p>All compiled scripts share a single {@link GroovyClassLoader}; because the
 * loader is shared, compile/close paths perform a {@link SpecialPermission}
 * check before touching it (see {@link #compile(String)} and {@link #close()}).
 */
public class GroovyScriptEngineService extends AbstractComponent implements ScriptEngineService {

    /**
     * The name of the scripting engine/language.
     */
    public static final String NAME = "groovy";

    /**
     * The setting to enable or disable <code>invokedynamic</code> instruction support in Java 7+.
     * <p>
     * Note: If this is disabled because <code>invokedynamic</code> is causing issues, then the Groovy
     * <code>indy</code> jar needs to be replaced by the non-<code>indy</code> variant of it on the classpath (e.g.,
     * <code>groovy-all-2.4.4-indy.jar</code> should be replaced by <code>groovy-all-2.4.4.jar</code>).
     * <p>
     * Defaults to {@code true}.
     */
    public static final String GROOVY_INDY_ENABLED = "script.groovy.indy";

    /**
     * The name of the Groovy compiler setting to use associated with activating <code>invokedynamic</code> support.
     */
    public static final String GROOVY_INDY_SETTING_NAME = "indy";

    // Shared classloader for all Groovy scripts compiled by this service;
    // isolates Groovy-generated classes from the ES classloader.
    private final GroovyClassLoader loader;

    @Inject
    public GroovyScriptEngineService(Settings settings) {
        super(settings);

        // Default imports made available to every script.
        ImportCustomizer imports = new ImportCustomizer();
        imports.addStarImports("org.joda.time");
        imports.addStaticStars("java.lang.Math");

        CompilerConfiguration config = new CompilerConfiguration();

        config.addCompilationCustomizers(imports);

        // Add BigDecimal -> Double transformer
        config.addCompilationCustomizers(new GroovyBigDecimalTransformer(CompilePhase.CONVERSION));

        // Implicitly requires Java 7u60 or later to get valid support
        if (settings.getAsBoolean(GROOVY_INDY_ENABLED, true)) {
            // maintain any default optimizations
            config.getOptimizationOptions().put(GROOVY_INDY_SETTING_NAME, true);
        }

        // Groovy class loader to isolate Groovy-land code
        // classloader created here; creating a classloader is a privileged
        // operation, hence the SecurityManager check + doPrivileged block.
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new SpecialPermission());
        }
        this.loader = AccessController.doPrivileged(new PrivilegedAction<GroovyClassLoader>() {
            @Override
            public GroovyClassLoader run() {
                return new GroovyClassLoader(getClass().getClassLoader(), config);
            }
        });
    }

    /**
     * Clears the compiled-script cache and closes the shared classloader.
     * Closing a classloader is privileged, so the same permission dance as in
     * the constructor is performed.
     */
    @Override
    public void close() {
        loader.clearCache();
        // close classloader here (why do we do this?)
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(new SpecialPermission());
        }
        AccessController.doPrivileged(new PrivilegedAction<Void>() {
            @Override
            public Void run() {
                try {
                    loader.close();
                } catch (IOException e) {
                    // Best-effort close: log and continue rather than fail shutdown.
                    logger.warn("Unable to close Groovy loader", e);
                }
                return null;
            }
        });
    }

    /**
     * Called when a compiled script is evicted; drops cached classes so old
     * script versions do not accumulate.
     */
    @Override
    public void scriptRemoved(@Nullable CompiledScript script) {
        // script could be null, meaning the script has already been garbage collected
        if (script == null || NAME.equals(script.lang())) {
            // Clear the cache, this removes old script versions from the
            // cache to prevent running out of PermGen space
            loader.clearCache();
        }
    }

    @Override
    public String[] types() {
        return new String[]{NAME};
    }

    @Override
    public String[] extensions() {
        return new String[]{NAME};
    }

    /** Groovy scripts are NOT sandboxed. */
    @Override
    public boolean sandboxed() {
        return false;
    }

    /**
     * Compiles the script source into a Groovy {@link Class}, named by the
     * SHA-1 of the source so identical scripts map to the same class name.
     *
     * @throws ScriptException wrapping any compilation failure.
     */
    @Override
    public Object compile(String script) {
        try {
            // we reuse classloader, so do a security check just in case.
            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                sm.checkPermission(new SpecialPermission());
            }
            return loader.parseClass(script, Hashing.sha1().hashString(script, StandardCharsets.UTF_8).toString());
        } catch (Throwable e) {
            if (logger.isTraceEnabled()) {
                logger.trace("exception compiling Groovy script:", e);
            }
            throw new ScriptException("failed to compile groovy script", e);
        }
    }

    /**
     * Return a script object with the given vars from the compiled script object
     */
    @SuppressWarnings("unchecked")
    private Script createScript(Object compiledScript, Map<String, Object> vars) throws InstantiationException, IllegalAccessException {
        // compiledScript is the Class produced by compile(); instantiate it
        // and bind the supplied variables to the new script instance.
        Class scriptClass = (Class) compiledScript;
        Script scriptObject = (Script) scriptClass.newInstance();
        Binding binding = new Binding();
        binding.getVariables().putAll(vars);
        scriptObject.setBinding(binding);
        return scriptObject;
    }

    /**
     * Builds an {@link ExecutableScript} for one-off execution with the given vars.
     *
     * @throws ScriptException wrapping any instantiation/binding failure.
     */
    @SuppressWarnings({"unchecked"})
    @Override
    public ExecutableScript executable(CompiledScript compiledScript, Map<String, Object> vars) {
        try {
            Map<String, Object> allVars = new HashMap<>();
            if (vars != null) {
                allVars.putAll(vars);
            }
            return new GroovyScript(compiledScript, createScript(compiledScript.compiled(), allVars), this.logger);
        } catch (Exception e) {
            throw new ScriptException("failed to build executable " + compiledScript, e);
        }
    }

    /**
     * Builds a {@link SearchScript}; per-segment scripts get the leaf lookup
     * variables (doc, _source, etc.) merged with the caller-supplied vars
     * (caller vars win on key collision).
     */
    @SuppressWarnings({"unchecked"})
    @Override
    public SearchScript search(final CompiledScript compiledScript, final SearchLookup lookup, @Nullable final Map<String, Object> vars) {
        return new SearchScript() {

            @Override
            public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException {
                final LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context);
                Map<String, Object> allVars = new HashMap<>();
                allVars.putAll(leafLookup.asMap());
                if (vars != null) {
                    allVars.putAll(vars);
                }
                Script scriptObject;
                try {
                    scriptObject = createScript(compiledScript.compiled(), allVars);
                } catch (InstantiationException | IllegalAccessException e) {
                    throw new ScriptException("failed to build search " + compiledScript, e);
                }
                return new GroovyScript(compiledScript, scriptObject, leafLookup, logger);
            }

            @Override
            public boolean needsScores() {
                // TODO: can we reliably know if a groovy script makes use of _score
                return true;
            }
        };
    }

    /**
     * Convenience: create a script instance with the given vars and run it once.
     *
     * @throws ScriptException wrapping any failure during creation or execution.
     */
    @Override
    public Object execute(CompiledScript compiledScript, Map<String, Object> vars) {
        try {
            Map<String, Object> allVars = new HashMap<>();
            if (vars != null) {
                allVars.putAll(vars);
            }
            Script scriptObject = createScript(compiledScript.compiled(), allVars);
            return scriptObject.run();
        } catch (Exception e) {
            throw new ScriptException("failed to execute " + compiledScript, e);
        }
    }

    /** No unwrapping is needed for Groovy return values. */
    @Override
    public Object unwrap(Object value) {
        return value;
    }

    /**
     * Adapter that exposes an instantiated Groovy {@link Script} as both an
     * {@link ExecutableScript} and a {@link LeafSearchScript}. Variables are
     * passed to the script through its {@link Binding} map.
     */
    public static final class GroovyScript implements ExecutableScript, LeafSearchScript {

        private final CompiledScript compiledScript;
        private final Script script;
        // Null when constructed for plain (non-search) execution.
        private final LeafSearchLookup lookup;
        // Live view of the script's binding variables; mutations are visible
        // to the script on its next run.
        private final Map<String, Object> variables;
        private final ESLogger logger;

        public GroovyScript(CompiledScript compiledScript, Script script, ESLogger logger) {
            this(compiledScript, script, null, logger);
        }

        @SuppressWarnings("unchecked")
        public GroovyScript(CompiledScript compiledScript, Script script, @Nullable LeafSearchLookup lookup, ESLogger logger) {
            this.compiledScript = compiledScript;
            this.script = script;
            this.lookup = lookup;
            this.logger = logger;
            this.variables = script.getBinding().getVariables();
        }

        @Override
        public void setScorer(Scorer scorer) {
            // Expose the scorer to the script as "_score" (lazily evaluated).
            this.variables.put("_score", new ScoreAccessor(scorer));
        }

        @Override
        public void setDocument(int doc) {
            if (lookup != null) {
                lookup.setDocument(doc);
            }
        }

        @SuppressWarnings({"unchecked"})
        @Override
        public void setNextVar(String name, Object value) {
            variables.put(name, value);
        }

        @Override
        public void setSource(Map<String, Object> source) {
            if (lookup != null) {
                lookup.source().setSource(source);
            }
        }

        /**
         * Runs the script, wrapping any failure in a {@link ScriptException}.
         */
        @Override
        public Object run() {
            try {
                return script.run();
            } catch (Throwable e) {
                if (logger.isTraceEnabled()) {
                    logger.trace("failed to run " + compiledScript, e);
                }
                throw new ScriptException("failed to run " + compiledScript, e);
            }
        }

        @Override
        public float runAsFloat() {
            return ((Number) run()).floatValue();
        }

        @Override
        public long runAsLong() {
            return ((Number) run()).longValue();
        }

        @Override
        public double runAsDouble() {
            return ((Number) run()).doubleValue();
        }

        @Override
        public Object unwrap(Object value) {
            return value;
        }
    }

    /**
     * A compilation customizer that is used to transform a number like 1.23,
     * which would normally be a BigDecimal, into a double value.
     */
    private class GroovyBigDecimalTransformer extends CompilationCustomizer {

        private GroovyBigDecimalTransformer(CompilePhase phase) {
            super(phase);
        }

        @Override
        public void call(final SourceUnit source, final GeneratorContext context, final ClassNode classNode) throws CompilationFailedException {
            new BigDecimalExpressionTransformer(source).visitClass(classNode);
        }
    }

    /**
     * Groovy expression transformer that converts BigDecimals to doubles
     */
    private class BigDecimalExpressionTransformer extends ClassCodeExpressionTransformer {

        private final SourceUnit source;

        private BigDecimalExpressionTransformer(SourceUnit source) {
            this.source = source;
        }

        @Override
        protected SourceUnit getSourceUnit() {
            return this.source;
        }

        @Override
        public Expression transform(Expression expr) {
            Expression newExpr = expr;
            if (expr instanceof ConstantExpression) {
                ConstantExpression constExpr = (ConstantExpression) expr;
                Object val = constExpr.getValue();
                // Replace BigDecimal literals with equivalent double constants.
                if (val != null && val instanceof BigDecimal) {
                    newExpr = new ConstantExpression(((BigDecimal) val).doubleValue());
                }
            }
            return super.transform(newExpr);
        }
    }
}
package com.xboxcollectorsplace.bl.controllers;

import java.util.ArrayList;
import java.util.Locale;

import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.text.TextUtils;

import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.xboxcollectorsplace.App;
import com.xboxcollectorsplace.bl.BLParameters;
import com.xboxcollectorsplace.bl.entities.Catalog;
import com.xboxcollectorsplace.bl.entities.Game;
import com.xboxcollectorsplace.bl.entities.GamePhotoGallery;
import com.xboxcollectorsplace.bl.entities.Options;
import com.xboxcollectorsplace.bl.entities.Sort;
import com.xboxcollectorsplace.utils.XLog;

/**
 * Controller used to store and recover parameters from Shared Preferences.
 *
 * <p>All public methods are {@code synchronized static} so concurrent callers
 * serialize on the class monitor; every method catches, logs, and absorbs its
 * own exceptions, returning a safe default instead of throwing.
 */
public class StorageController {

    //------------------------------------------------------------------------- PRIVATE HELPERS*/

    /**
     * Single access point for the app's shared-preferences file.
     * Extracted to avoid repeating the same lookup in every method.
     */
    private static SharedPreferences getPreferences() {
        return App.getContext().getSharedPreferences(BLParameters.SHARED_PREFERENCES.NAME,
                Context.MODE_PRIVATE);
    }

    //------------------------------------------------------------------------- PUBLIC METHODS*/

    /**
     * Stores the selected options (Language, boot type and whether the genre or the year is shown
     * in Smartphones)
     *
     * @param options User selected options
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean saveOptions(Options options) {
        boolean result = false;
        try {
            Editor editor = getPreferences().edit();
            editor.putString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_LANGUAGE,
                    options.getLanguage().toString());
            editor.putString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_BOOT,
                    options.getBoot().toString());
            editor.putString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_SHOW_GENRE,
                    options.getShowGenre().toString());
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.saveOptions]", ex);
        }
        return result;
    }

    /**
     * Recovers the selected options (Language, boot type and whether the genre or the year is
     * shown in Smartphones)
     *
     * @return User selected options; defaults are used for any value that was
     *         never saved (language falls back to the device locale, then English)
     */
    public synchronized static Options loadOptions() {
        Options options = new Options();
        Options.Language language;
        Options.BootType boot;
        Options.ShowGenre showGenre;
        try {
            SharedPreferences preferences = getPreferences();
            String languageString =
                    preferences.getString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_LANGUAGE, null);
            if (!TextUtils.isEmpty(languageString)) {
                language = Options.Language.valueOf(languageString);
            } else {
                // If the user has not selected any language, the language code of the device
                // is checked to see if its supported by the App. Elseway the default language
                // is selected, English
                String currentLanguage = Locale.getDefault().getLanguage();
                if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.GERMAN_CODE)) {
                    language = Options.Language.GERMAN;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.SPANISH_CODE)) {
                    language = Options.Language.SPANISH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.BASQUE_CODE)) {
                    language = Options.Language.BASQUE;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.FRENCH_CODE)) {
                    language = Options.Language.FRENCH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.ITALIAN_CODE)) {
                    language = Options.Language.ITALIAN;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.POLISH_CODE)) {
                    language = Options.Language.POLISH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.JAPANESE_CODE)) {
                    language = Options.Language.JAPANESE;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.KOREAN_CODE)) {
                    language = Options.Language.KOREAN;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.CZECH_CODE)) {
                    language = Options.Language.CZECH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.RUSSIAN_CODE)) {
                    language = Options.Language.RUSSIAN;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.FINNISH_CODE)) {
                    language = Options.Language.FINNISH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.SWEDISH_CODE)) {
                    language = Options.Language.SWEDISH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.NORWEGIAN_CODE)) {
                    language = Options.Language.NORWEGIAN;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.DUTCH_CODE)) {
                    language = Options.Language.DUTCH;
                } else if (currentLanguage.equalsIgnoreCase(BLParameters.LANGUAGES.PORTUGUESE_CODE)) {
                    language = Options.Language.PORTUGUESE;
                } else {
                    language = Options.Language.ENGLISH;
                }
            }

            String bootString =
                    preferences.getString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_BOOT, null);
            if (!TextUtils.isEmpty(bootString)) {
                boot = Options.BootType.valueOf(bootString);
            } else {
                boot = Options.BootType.OLD;
            }

            String showGenreString =
                    preferences.getString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_SHOW_GENRE, null);
            if (!TextUtils.isEmpty(showGenreString)) {
                showGenre = Options.ShowGenre.valueOf(showGenreString);
            } else {
                showGenre = Options.ShowGenre.YEAR;
            }

            options.setLanguage(language);
            options.setBoot(boot);
            options.setShowGenre(showGenre);
        } catch (Exception ex) {
            // On failure a default-constructed Options is returned.
            XLog.e("[StorageController.loadOptions]", ex);
        }
        return options;
    }

    /**
     * Stores the user selected sorting of the collection/catalog
     *
     * @param sort User selected sorting
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean saveSorting(Sort sort) {
        boolean result = false;
        try {
            Editor editor = getPreferences().edit();
            editor.putString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_LIST_SORT,
                    sort.getSort().toString());
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.saveSorting]", ex);
        }
        return result;
    }

    /**
     * Recovers the user selected sorting of the collection/catalog
     *
     * @return User selected sorting (ASC_TITLE when none was saved)
     */
    public synchronized static Sort loadSorting() {
        Sort sort = new Sort();
        Sort.ListSort listSort;
        try {
            String listOrderString = getPreferences()
                    .getString(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_LIST_SORT, null);
            if (!TextUtils.isEmpty(listOrderString)) {
                listSort = Sort.ListSort.valueOf(listOrderString);
            } else {
                listSort = Sort.ListSort.ASC_TITLE;
            }
            sort.setSort(listSort);
        } catch (Exception ex) {
            XLog.e("[StorageController.loadSorting]", ex);
        }
        return sort;
    }

    /**
     * Stores the boolean indicating if the title screen should have sound.
     *
     * <p>NOTE(review): the original javadoc described {@code true} as "muted",
     * which contradicts the parameter name {@code soundActive} — verify the
     * intended semantics against the callers.
     *
     * @param soundActive Sound flag for the title screen
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean saveSoundActive(boolean soundActive) {
        boolean result = false;
        try {
            Editor editor = getPreferences().edit();
            editor.putBoolean(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_SOUND_ACTIVE, soundActive);
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.saveSoundActive]", ex);
        }
        return result;
    }

    /**
     * Recovers the boolean indicating if the title screen should have sound.
     *
     * @return The stored sound flag ({@code false} when never saved or on error)
     */
    public synchronized static boolean loadSoundActive() {
        boolean soundActive = false;
        try {
            soundActive = getPreferences()
                    .getBoolean(BLParameters.SHARED_PREFERENCES.KEY_OPTIONS_SOUND_ACTIVE, false);
        } catch (Exception ex) {
            XLog.e("[StorageController.loadSoundActive]", ex);
        }
        return soundActive;
    }

    /**
     * Stores the users game collection
     *
     * @param collection       Game collection of the user
     * @param arcadeCollection Boolean indicating if the collection to store is the arcade or
     *                         retail collection (true if Arcade)
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean saveCollection(Catalog collection, boolean arcadeCollection) {
        boolean result = false;
        Gson gson = new Gson();
        try {
            String catalogJSON = gson.toJson(collection);
            Editor editor = getPreferences().edit();
            if (arcadeCollection) {
                editor.putString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION_ARCADE, catalogJSON);
            } else {
                editor.putString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION, catalogJSON);
            }
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.saveCollection]", ex);
        }
        return result;
    }

    /**
     * Recovers the users game collection
     *
     * @param arcadeCollection Boolean indicating if the collection to recover is the arcade or
     *                         retail collection (true if Arcade)
     * @return Game collection of the user (an empty catalog when nothing is stored)
     */
    public synchronized static Catalog loadCollection(boolean arcadeCollection) {
        Catalog collection = new Catalog();
        Gson gson = new Gson();
        String collectionJSON;
        try {
            SharedPreferences preferences = getPreferences();
            if (arcadeCollection) {
                collectionJSON =
                        preferences.getString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION_ARCADE, null);
            } else {
                collectionJSON =
                        preferences.getString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION, null);
            }
            if (!TextUtils.isEmpty(collectionJSON)) {
                collection = gson.fromJson(collectionJSON, Catalog.class);
            } else {
                // Nothing stored yet: return a fresh catalog with an empty game list.
                collection = new Catalog();
                collection.setCatalog(new ArrayList<Game>());
            }
        } catch (Exception ex) {
            XLog.e("[StorageController.loadCollection]", ex);
        }
        return collection;
    }

    /**
     * Deletes users game collection
     *
     * @param arcadeCollection Boolean indicating if the collection to delete is the arcade or
     *                         retail collection (true if Arcade)
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean deleteCollection(boolean arcadeCollection) {
        boolean result = false;
        try {
            Editor editor = getPreferences().edit();
            // putString(key, null) removes the entry, same as Editor.remove(key).
            if (arcadeCollection) {
                editor.putString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION_ARCADE, null);
            } else {
                editor.putString(BLParameters.SHARED_PREFERENCES.KEY_COLLECTION, null);
            }
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.deleteCollection]", ex);
        }
        return result;
    }

    /**
     * Adds a game to the users collection
     *
     * @param game             Game to add
     * @param arcadeCollection Boolean indicating if the game is going to be added to the arcade
     *                         or retail collection (true if Arcade)
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean addGame(Game game, boolean arcadeCollection) {
        boolean result = false;
        try {
            Catalog catalog = loadCollection(arcadeCollection);
            ArrayList<Game> games = catalog.getCatalog();
            games.add(game);
            catalog.setCatalog(games);
            result = saveCollection(catalog, arcadeCollection);
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.addGame]", ex);
        }
        return result;
    }

    /**
     * Removes a game from the users collection
     *
     * @param gameID           ID of the game to remove
     * @param arcadeCollection Boolean indicating if the game belongs to the arcade or
     *                         retail collection (true if Arcade)
     * @return Boolean indicating the success of the operation (the persisted
     *         save's result; the collection is re-saved even when the ID was
     *         not found, matching the original behavior)
     */
    public synchronized static boolean removeGame(int gameID, boolean arcadeCollection) {
        boolean result = false;
        try {
            Catalog catalog = loadCollection(arcadeCollection);
            ArrayList<Game> games = catalog.getCatalog();
            for (Game game : games) {
                if (game.getId() == gameID) {
                    result = games.remove(game);
                    break; // break immediately: removing during for-each would otherwise throw
                }
            }
            catalog.setCatalog(games);
            result = saveCollection(catalog, arcadeCollection);
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.removeGame]", ex);
        }
        return result;
    }

    /**
     * Edits a game of the users collection
     *
     * @param editGame         Game to edit (matched by ID against the stored collection)
     * @param arcadeCollection Boolean indicating if the game belongs to the arcade or
     *                         retail collection (true if Arcade)
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean editGame(Game editGame, boolean arcadeCollection) {
        boolean result = false;
        try {
            Catalog catalog = loadCollection(arcadeCollection);
            ArrayList<Game> games = catalog.getCatalog();
            for (int i = 0; i < games.size(); i++) {
                Game game = games.get(i);
                if (game.getId() == editGame.getId()) {
                    games.set(i, editGame);
                    result = true;
                    break;
                }
            }
            catalog.setCatalog(games);
            result = saveCollection(catalog, arcadeCollection);
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.editGame]", ex);
        }
        return result;
    }

    /**
     * Recovers a game from the users collection
     *
     * @param gameID           ID of the game to recover
     * @param arcadeCollection Boolean indicating if the game belongs to the arcade or
     *                         retail collection (true if Arcade)
     * @return Game recovered, or {@code null} when not found
     */
    public synchronized static Game loadGame(int gameID, boolean arcadeCollection) {
        Game returnGame = null;
        try {
            Catalog catalog = loadCollection(arcadeCollection);
            ArrayList<Game> games = catalog.getCatalog();
            for (int i = 0; i < games.size(); i++) {
                Game game = games.get(i);
                if (game.getId() == gameID) {
                    returnGame = game;
                    break;
                }
            }
        } catch (Exception ex) {
            XLog.e("[StorageController.loadGame]", ex);
        }
        return returnGame;
    }

    /**
     * Checks if a game exists in the users collection
     *
     * @param gameID           ID of the game to check
     * @param arcadeCollection Boolean indicating if the game belongs to the arcade or
     *                         retail collection (true if Arcade)
     * @return Boolean indicating if the game exists
     */
    public synchronized static boolean existsGame(int gameID, boolean arcadeCollection) {
        boolean exists = false;
        try {
            Catalog catalog = loadCollection(arcadeCollection);
            ArrayList<Game> games = catalog.getCatalog();
            for (Game game : games) {
                if (game.getId() == gameID) {
                    exists = true;
                    break;
                }
            }
        } catch (Exception ex) {
            exists = false;
            XLog.e("[StorageController.existsGame]", ex);
        }
        return exists;
    }

    /**
     * Stores the info of the photo gallery
     *
     * @param photoGallery Array with the info of the photo gallery
     * @return Boolean indicating the success of the operation
     */
    public synchronized static boolean savePhotoGallery(ArrayList<GamePhotoGallery> photoGallery) {
        boolean result = false;
        Gson gson = new Gson();
        try {
            Editor editor = getPreferences().edit();
            editor.putString(BLParameters.SHARED_PREFERENCES.KEY_PHOTO_GALLERY,
                    gson.toJson(photoGallery));
            result = editor.commit();
        } catch (Exception ex) {
            result = false;
            XLog.e("[StorageController.savePhotoGallery]", ex);
        }
        return result;
    }

    /**
     * Recovers the info of the photo gallery
     *
     * @return Array with the info of the photo gallery (empty when nothing is stored)
     */
    public synchronized static ArrayList<GamePhotoGallery> loadPhotoGallery() {
        ArrayList<GamePhotoGallery> photoGallery = new ArrayList<GamePhotoGallery>();
        Gson gson = new Gson();
        String photoGalleryJSON;
        try {
            photoGalleryJSON = getPreferences()
                    .getString(BLParameters.SHARED_PREFERENCES.KEY_PHOTO_GALLERY, null);
            if (!TextUtils.isEmpty(photoGalleryJSON)) {
                // TypeToken is required so Gson can rebuild the generic list type.
                photoGallery = gson.fromJson(photoGalleryJSON,
                        new TypeToken<ArrayList<GamePhotoGallery>>() {}.getType());
            } else {
                photoGallery = new ArrayList<GamePhotoGallery>();
            }
        } catch (Exception ex) {
            XLog.e("[StorageController.loadPhotoGallery]", ex);
        }
        return photoGallery;
    }
}
/* $Header: //info.ravenbrook.com/project/jili/version/1.1/code/mnj/lua/LuaTable.java#1 $
 * Copyright (c) 2006 Nokia Corporation and/or its subsidiary(-ies).
 * All rights reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject
 * to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
 * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */

package mnj.lua;

import java.util.Iterator;
import java.util.Enumeration;

// patched to HashMap: GWT doesn't have Hashtable

/**
 * Class that models Lua's tables.  Each Lua table is an instance of
 * this class.  Whilst you can clearly see that this class extends
 * {@link java.util.HashMap} you should in no way rely upon that.
 * Calling any methods that are not defined in this class (but are
 * defined in a super class) is extremely deprecated.
 */
public final class LuaTable extends java.util.HashMap
{
  private static final int MAXBITS = 26;
  private static final int MAXASIZE = 1 << MAXBITS;

  private LuaTable metatable;   // = null;
  private static final Object[] ZERO = new Object[0];

  /**
   * Array used so that tables accessed like arrays are more efficient.
   * All elements stored at an integer index, <var>i</var>, in the
   * range [1,sizeArray] are stored at <code>array[i-1]</code>.
   * This speeds and reduces space usage for array-like access.
   * When the table is rehashed the array's size is chosen to be the
   * largest power of 2 such that at least half the entries are
   * occupied.  Default access granted for {@link Enum} class, do not
   * abuse.
   */
  Object[] array = ZERO;

  /**
   * Equal to <code>array.length</code>.  Default access granted for
   * {@link Enum} class, do not abuse.
   */
  int sizeArray; // = 0;

  /**
   * <code>true</code> whenever we are in the {@link #rehash}
   * method.  Avoids infinite rehash loops.
   */
  private boolean inrehash; // = false;

  LuaTable()
  {
    super(1);
  }

  /**
   * Fresh LuaTable with hints for preallocating to size.
   * @param narray number of array slots to preallocate.
   * @param nhash number of hash slots to preallocate.
   */
  LuaTable(int narray, int nhash)
  {
    // :todo: super(nhash) isn't clearly correct as adding nhash hash
    // table entries will cause a rehash with the usual implementation
    // (which rehashes when ratio of entries to capacity exceeds the
    // load factor of 0.75).  Perhaps ideally we would size the hash
    // tables such that adding nhash entries will not cause a rehash.
    super(nhash);
    array = new Object[narray];
    for (int i = 0; i < narray; ++i)
    {
      array[i] = Lua.NIL;
    }
    sizeArray = narray;
  }

  /**
   * Implements discriminating equality.  <code>o1.equals(o2) == (o1 ==
   * o2)</code>.  This method is not necessary in CLDC, it's only
   * necessary in J2SE because java.util.Hashtable overrides equals.
   * @param o the reference to compare with.
   * @return true when equal.
   */
  public boolean equals(Object o)
  {
    return this == o;
  }

  /**
   * Provided to avoid Checkstyle warning.  This method is not necessary
   * for correctness (in neither JME nor JSE), it's only provided to
   * remove a Checkstyle warning.
   * Since {@link #equals} implements the most discriminating
   * equality possible, this method can have any implementation.
   * @return an int.
   */
  public int hashCode()
  {
    return System.identityHashCode(this);
  }

  /**
   * If the key (a boxed Double) is an exact non-fractional value,
   * return it as an int; otherwise -1.
   */
  private static int arrayindex(Object key)
  {
    if (key instanceof Double)
    {
      double d = ((Double)key).doubleValue();
      int k = (int)d;
      if (k == d)
      {
        return k;
      }
    }
    return -1;  // 'key' did not match some condition
  }

  /**
   * Computes the optimal size of the array part given a census of
   * integer keys per power-of-2 slice (see PUC-Rio's ltable.c).
   * On entry narray[0] holds the total integer-key count; on exit it
   * holds the chosen array size.
   * @return number of elements that will go to the array part.
   */
  private static int computesizes(int[] nums, int[] narray)
  {
    final int t = narray[0];
    int a = 0;        // number of elements smaller than 2^i
    int na = 0;       // number of elements to go to array part
    int n = 0;        // optimal size for array part
    int twotoi = 1;   // 2^i
    for (int i = 0; twotoi/2 < t; ++i)
    {
      if (nums[i] > 0)
      {
        a += nums[i];
        if (a > twotoi/2)       // more than half elements present?
        {
          n = twotoi;   // optimal size (till now)
          na = a;       // all elements smaller than n will go to array part
        }
      }
      if (a == t)       // all elements already counted
      {
        break;
      }
      twotoi *= 2;
    }
    narray[0] = n;
    //# assert narray[0]/2 <= na && na <= narray[0]
    return na;
  }

  /**
   * If key is a candidate array index, bump the census bucket for its
   * power-of-2 slice and return 1; otherwise return 0.
   */
  private int countint(Object key, int[] nums)
  {
    int k = arrayindex(key);
    if (0 < k && k <= MAXASIZE)         // is 'key' an appropriate array index?
    {
      ++nums[ceillog2(k)];      // count as such
      return 1;
    }
    return 0;
  }

  /**
   * Counts the non-nil elements in the array part, slice by slice,
   * accumulating into nums.
   * @return total number of non-nil array elements.
   */
  private int numusearray(int[] nums)
  {
    int ause = 0;       // summation of 'nums'
    int i = 1;          // count to traverse all array keys
    int ttlg = 1;       // 2^lg
    for (int lg = 0; lg <= MAXBITS; ++lg)       // for each slice
    {
      int lc = 0;       // counter
      int lim = ttlg;
      if (lim > sizeArray)
      {
        lim = sizeArray;        // adjust upper limit
        if (i > lim)
        {
          break;        // no more elements to count
        }
      }
      // count elements in range (2^(lg-1), 2^lg]
      for (; i <= lim; ++i)
      {
        if (array[i-1] != Lua.NIL)
        {
          ++lc;
        }
      }
      nums[lg] += lc;
      ause += lc;
      ttlg *= 2;
    }
    return ause;
  }

  /**
   * Counts entries in the hash part, adding integer-keyed entries to
   * the census in nums and to pnasize[0].
   * @return total number of hash-part entries.
   */
  private int numusehash(int[] nums, int[] pnasize)
  {
    int totaluse = 0;   // total number of elements
    int ause = 0;       // summation of nums
    // FIX: the previous code cast super.values() (a Collection) to
    // Iterator, which throws ClassCastException at runtime.  The
    // census must also be taken over KEYS (as in the original
    // Hashtable-based code that used super.keys()), since countint
    // classifies keys as potential array indices, not values.
    Iterator e = super.keySet().iterator();
    while (e.hasNext())
    {
      Object o = e.next();
      ause += countint(o, nums);
      ++totaluse;
    }
    pnasize[0] += ause;
    return totaluse;
  }

  /**
   * Resizes the array part, migrating entries between the array part
   * and the hash part as required.
   * @param nasize (new) size of array part
   */
  private void resize(int nasize)
  {
    if (nasize == sizeArray)
    {
      return;
    }
    Object[] newarray = new Object[nasize];
    if (nasize > sizeArray)     // array part must grow?
    {
      // The new array slots, from sizeArray to nasize-1, must
      // be filled with their values from the hash part.
      // There are two strategies:
      // Iterate over the new array slots, and look up each index in the
      // hash part to see if it has a value; or,
      // Iterate over the hash part and see if each key belongs in the
      // array part.
      // For now we choose the first algorithm.
      // :todo: consider using second algorithm, possibly dynamically.
      System.arraycopy(array, 0, newarray, 0, array.length);
      for (int i = array.length; i < nasize; ++i)
      {
        Object key = new Double(i+1);
        Object v = super.remove(key);
        if (v == null)
        {
          v = Lua.NIL;
        }
        newarray[i] = v;
      }
    }
    if (nasize < sizeArray)     // array part must shrink?
    {
      // move elements from array slots nasize to sizeArray-1 to the
      // hash part.
      for (int i = nasize; i < sizeArray; ++i)
      {
        if (array[i] != Lua.NIL)
        {
          Object key = new Double(i+1);
          super.put(key, array[i]);
        }
      }
      System.arraycopy(array, 0, newarray, 0, newarray.length);
    }
    array = newarray;
    sizeArray = array.length;
  }

  /**
   * Recomputes the optimal array-part size from a census of all keys
   * and resizes accordingly.
   * NOTE(review): unlike java.util.Hashtable, HashMap never calls a
   * protected rehash() hook, so this method only runs if called
   * explicitly from this class — confirm the array part still grows as
   * intended after the Hashtable-to-HashMap patch.
   */
  protected void rehash()
  {
    boolean oldinrehash = inrehash;
    inrehash = true;
    if (!oldinrehash)
    {
      int[] nasize = new int[1];
      int[] nums = new int[MAXBITS+1];
      nasize[0] = numusearray(nums);    // count keys in array part
      int totaluse = nasize[0];
      totaluse += numusehash(nums, nasize);
      // na (elements bound for the array part) is computed but, unlike
      // PUC-Rio, not used here to size the hash part.
      int na = computesizes(nums, nasize);
      resize(nasize[0]);
    }
    // super.rehash();
    inrehash = oldinrehash;
  }

  /**
   * Getter for metatable member.
   * @return The metatable.
   */
  LuaTable getMetatable()
  {
    return metatable;
  }

  /**
   * Setter for metatable member.
   * @param metatable The metatable.
   */
  // :todo: Support metatable's __gc and __mode keys appropriately.
  // This involves detecting when those keys are present in the
  // metatable, and changing all the entries in the Hashtable
  // to be instance of java.lang.Ref as appropriate.
  void setMetatable(LuaTable metatable)
  {
    this.metatable = metatable;
    return;
  }

  /**
   * Supports Lua's length (#) operator.  More or less equivalent to
   * luaH_getn and unbound_search in ltable.c.
   */
  int getn()
  {
    int j = sizeArray;
    if (j > 0 && array[j-1] == Lua.NIL)
    {
      // there is a boundary in the array part: (binary) search for it
      int i = 0;
      while (j - i > 1)
      {
        int m = (i+j)/2;
        if (array[m-1] == Lua.NIL)
        {
          j = m;
        }
        else
        {
          i = m;
        }
      }
      return i;
    }
    // unbound_search
    int i = 0;
    j = 1;
    // Find 'i' and 'j' such that i is present and j is not.
    while (this.getnum(j) != Lua.NIL)
    {
      i = j;
      j *= 2;
      if (j < 0)        // overflow
      {
        // Pathological case.  Linear search.
        i = 1;
        while (this.getnum(i) != Lua.NIL)
        {
          ++i;
        }
        return i-1;
      }
    }
    // binary search between i and j
    while (j - i > 1)
    {
      int m = (i+j)/2;
      if (this.getnum(m) == Lua.NIL)
      {
        j = m;
      }
      else
      {
        i = m;
      }
    }
    return i;
  }

  /**
   * Like {@link java.util.Hashtable#get}.  Ensures that indexes
   * with no value return {@link Lua#NIL}.  In order to get the correct
   * behaviour for <code>t[nil]</code>, this code assumes that Lua.NIL
   * is non-<code>null</code>.
   */
  public Object getlua(Object key)
  {
    // Fast path: integral Double keys in [1,sizeArray] hit the array part.
    if (key instanceof Double)
    {
      double d = ((Double)key).doubleValue();
      if (d <= sizeArray && d >= 1)
      {
        int i = (int)d;
        if (i == d)
        {
          return array[i-1];
        }
      }
    }
    Object r = super.get(key);
    if (r == null)
    {
      r = Lua.NIL;
    }
    return r;
  }

  /**
   * Like {@link #getlua(Object)} but the result is written into
   * the <var>value</var> {@link Slot}.
   */
  public void getlua(Slot key, Slot value)
  {
    if (key.r == Lua.NUMBER)
    {
      double d = key.d;
      if (d <= sizeArray && d >= 1)
      {
        int i = (int)d;
        if (i == d)
        {
          value.setObject(array[i-1]);
          return;
        }
      }
    }
    Object r = super.get(key.asObject());
    if (r == null)
    {
      r = Lua.NIL;
    }
    value.setObject(r);
  }

  /** Like get for numeric (integer) keys. */
  public Object getnum(int k)
  {
    if (k <= sizeArray && k >= 1)
    {
      return array[k-1];
    }
    Object r = super.get(new Double(k));
    if (r == null)
    {
      return Lua.NIL;
    }
    return r;
  }

  /**
   * Like {@link java.util.Hashtable#put} but enables Lua's semantics
   * for <code>nil</code>;
   * In particular that <code>x = nil</code>
   * deletes <code>x</code>.
   * And also that <code>t[nil]</code> raises an error.
   * Generally, users of Jill should be using
   * {@link Lua#setTable} instead of this.
   * @param key key.
   * @param value value.
   */
  public void putlua(Lua L, Object key, Object value)
  {
    double d = 0.0;
    int i = Integer.MAX_VALUE;  // sentinel: "not an array-part index"

    if (key == Lua.NIL)
    {
      L.gRunerror("table index is nil");
    }
    if (key instanceof Double)
    {
      d = ((Double)key).doubleValue();
      int j = (int)d;
      if (j == d && j >= 1)
      {
        i = j; // will cause additional check for array part later if
               // the array part check fails now.
        if (i <= sizeArray)
        {
          array[i-1] = value;
          return;
        }
      }
      if (Double.isNaN(d))
      {
        L.gRunerror("table index is NaN");
      }
    }
    // :todo: Consider checking key for NaN (PUC-Rio does)
    if (value == Lua.NIL)
    {
      remove(key);
      return;
    }
    super.put(key, value);
    // This check is necessary because sometimes the call to super.put
    // can rehash and the new (k,v) pair should be in the array part
    // after the rehash, but is still in the hash part.
    if (i <= sizeArray)
    {
      remove(key);
      array[i-1] = value;
    }
  }

  /** Slot-keyed variant of {@link #putlua(Lua, Object, Object)}. */
  public void putlua(Lua L, Slot key, Object value)
  {
    int i = Integer.MAX_VALUE;  // sentinel: "not an array-part index"

    if (key.r == Lua.NUMBER)
    {
      int j = (int)key.d;
      if (j == key.d && j >= 1)
      {
        i = j;
        if (i <= sizeArray)
        {
          array[i-1] = value;
          return;
        }
      }
      if (Double.isNaN(key.d))
      {
        L.gRunerror("table index is NaN");
      }
    }
    Object k = key.asObject();
    // :todo: consider some sort of tail merge with the other putlua
    if (value == Lua.NIL)
    {
      remove(k);
      return;
    }
    super.put(k, value);
    if (i <= sizeArray)
    {
      remove(k);
      array[i-1] = value;
    }
  }

  /**
   * Like put for numeric (integer) keys.
   */
  public void putnum(int k, Object v)
  {
    if (k <= sizeArray && k >= 1)
    {
      array[k-1] = v;
      return;
    }
    // The key can never be NIL so putlua will never notice that its L
    // argument is null.
    // :todo: optimisation to avoid putlua checking for array part again
    putlua(null, new Double(k), v);
  }

  /**
   * Do not use, implementation exists only to generate deprecated
   * warning.
   * @deprecated Use getlua instead.
   */
  public Object get(Object key)
  {
    throw new IllegalArgumentException();
  }

  /**
   * Enumerates keys: first the occupied array-part indices (as boxed
   * Doubles), then the hash-part keys.
   */
  public Enumeration keys()
  {
    // FIX: previously this passed (Iterator)super.values(), which both
    // fails with ClassCastException (values() returns a Collection) and
    // would enumerate values rather than keys.
    return new Enum(this, super.keySet().iterator());
  }

  /**
   * Do not use, implementation exists only to generate deprecated
   * warning.
   * @deprecated Use putlua instead.
   */
  public Object put(Object key, Object value)
  {
    throw new IllegalArgumentException();
  }

  /**
   * Used by oLog2.  DO NOT MODIFY.
   */
  private static final byte[] LOG2 = new byte[]
  {
    0,1,2,2,3,3,3,3,4,4,4,4,4,4,4,4,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
    6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
    7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
    7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
    8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
    8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
    8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
    8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8
  };

  /**
   * Equivalent to luaO_log2.
   */
  private static int oLog2(int x)
  {
    //# assert x >= 0
    int l = -1;
    while (x >= 256)
    {
      l += 8;
      x >>>= 8;
    }
    return l + LOG2[x];
  }

  private static int ceillog2(int x)
  {
    return oLog2(x-1)+1;
  }
}

/**
 * Enumeration over a LuaTable's keys: first the occupied array-part
 * indices (returned as boxed Doubles), then the hash-part keys from
 * the supplied iterator.
 */
final class Enum implements Enumeration
{
  private LuaTable t;
  private int i;        // = 0
  private Iterator e;

  Enum(LuaTable t, Iterator e)
  {
    this.t = t;
    this.e = e;
    inci();
  }

  /**
   * Increments {@link #i} until it either exceeds
   * <code>t.sizeArray</code> or indexes a non-nil element.
   */
  void inci()
  {
    while (i < t.sizeArray && t.array[i] == Lua.NIL)
    {
      ++i;
    }
  }

  public boolean hasMoreElements()
  {
    if (i < t.sizeArray)
    {
      return true;
    }
    return e.hasNext();
  }

  public Object nextElement()
  {
    Object r;
    if (i < t.sizeArray)
    {
      ++i;      // array index i corresponds to key i+1
      r = new Double(i);
      inci();
    }
    else
    {
      r = e.next();
    }
    return r;
  }
}
/*
 * (C) Copyright 2014-2016 mjahnen <jahnen@in.tum.de>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.github.mjdev.libaums.fs.fat32;

import java.io.IOException;
import java.nio.ByteBuffer;

import com.github.mjdev.libaums.driver.BlockDeviceDriver;
import com.github.mjdev.libaums.fs.UsbFile;

/**
 * Represents a regular file on a FAT32 file system.  All content access
 * is delegated to a lazily-created {@link ClusterChain}; directory
 * metadata (name, size, timestamps) lives in the {@link FatLfnDirectoryEntry}
 * owned by the parent {@link FatDirectory}.
 */
public class FatFile implements UsbFile {

    private BlockDeviceDriver blockDevice;
    private FAT fat;
    private Fat32BootSector bootSector;
    private FatDirectory parent;
    // Lazily initialized by initChain(); null until first content access.
    private ClusterChain chain;
    private FatLfnDirectoryEntry entry;

    /**
     * Constructs a new file with the given information.
     *
     * @param blockDevice
     *            The device where the file system is located.
     * @param fat
     *            The FAT used to follow cluster chains.
     * @param bootSector
     *            The boot sector of the file system.
     * @param entry
     *            The corresponding entry in a FAT directory.
     * @param parent
     *            The parent directory of the newly constructed file.
     */
    private FatFile(BlockDeviceDriver blockDevice, FAT fat, Fat32BootSector bootSector,
            FatLfnDirectoryEntry entry, FatDirectory parent) {
        this.blockDevice = blockDevice;
        this.fat = fat;
        this.bootSector = bootSector;
        this.entry = entry;
        this.parent = parent;
    }

    /**
     * Creates a new file with the given information.
     *
     * @param entry
     *            The corresponding entry in a FAT directory.
     * @param blockDevice
     *            The device where the file system is located.
     * @param fat
     *            The FAT used to follow cluster chains.
     * @param bootSector
     *            The boot sector of the file system.
     * @param parent
     *            The parent directory of the newly created file.
     * @return The newly constructed file.
     * @throws IOException
     *             If reading from device fails.
     */
    public static FatFile create(FatLfnDirectoryEntry entry, BlockDeviceDriver blockDevice,
            FAT fat, Fat32BootSector bootSector, FatDirectory parent) throws IOException {
        return new FatFile(blockDevice, fat, bootSector, entry, parent);
    }

    /**
     * Initializes the cluster chain to access the contents of the file.
     *
     * @throws IOException
     *             If reading from FAT fails.
     */
    private void initChain() throws IOException {
        // Only create the chain once; subsequent calls are no-ops.
        if (chain == null) {
            chain = new ClusterChain(entry.getStartCluster(), blockDevice, fat, bootSector);
        }
    }

    // Files cannot contain other entries; only directories support search.
    @Override
    public UsbFile search(String path) {
        throw new UnsupportedOperationException("This is a file!");
    }

    @Override
    public boolean isDirectory() {
        return false;
    }

    @Override
    public String getName() {
        return entry.getName();
    }

    // Renaming is a directory operation: the parent owns the entry.
    @Override
    public void setName(String newName) throws IOException {
        parent.renameEntry(entry, newName);
    }

    @Override
    public long createdAt() {
        return entry.getActualEntry().getCreatedDateTime();
    }

    @Override
    public long lastModified() {
        return entry.getActualEntry().getLastModifiedDateTime();
    }

    @Override
    public long lastAccessed() {
        return entry.getActualEntry().getLastAccessedDateTime();
    }

    @Override
    public UsbFile getParent() {
        return parent;
    }

    @Override
    public String[] list() {
        throw new UnsupportedOperationException("This is a file!");
    }

    @Override
    public UsbFile[] listFiles() throws IOException {
        throw new UnsupportedOperationException("This is a file!");
    }

    @Override
    public long getLength() {
        return entry.getFileSize();
    }

    // Resizes the cluster chain on disk and mirrors the new size into
    // the directory entry.
    @Override
    public void setLength(long newLength) throws IOException {
        initChain();
        chain.setLength(newLength);
        entry.setFileSize(newLength);
    }

    @Override
    public void read(long offset, ByteBuffer destination) throws IOException {
        initChain();
        entry.setLastAccessedTimeToNow();
        chain.read(offset, destination);
    }

    // Grows the file first if the write extends past the current end.
    @Override
    public void write(long offset, ByteBuffer source) throws IOException {
        initChain();
        long length = offset + source.remaining();
        if (length > getLength())
            setLength(length);
        entry.setLastModifiedTimeToNow();
        chain.write(offset, source);
    }

    @Override
    public void flush() throws IOException {
        // we only have to update the parent because we are always writing
        // everything
        // immediately to the device
        // the parent directory is responsible for updating the
        // FatDirectoryEntry which
        // contains things like the file size and the date time fields
        parent.write();
    }

    @Override
    public void close() throws IOException {
        flush();
    }

    @Override
    public UsbFile createDirectory(String name) throws IOException {
        throw new UnsupportedOperationException("This is a file!");
    }

    @Override
    public UsbFile createFile(String name) throws IOException {
        throw new UnsupportedOperationException("This is a file!");
    }

    @Override
    public void moveTo(UsbFile destination) throws IOException {
        parent.move(entry, destination);
        parent = (FatDirectory) destination;
    }

    // Order matters: detach the entry and persist the parent before the
    // clusters are released via setLength(0).
    @Override
    public void delete() throws IOException {
        initChain();
        parent.removeEntry(entry);
        parent.write();
        chain.setLength(0);
    }

    @Override
    public boolean isRoot() {
        return false;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.core.xml;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;

import org.apache.camel.CamelContext;
import org.apache.camel.LoggingLevel;
import org.apache.camel.processor.RedeliveryPolicy;
import org.apache.camel.util.CamelContextHelper;

/**
 * A factory which instantiates {@link RedeliveryPolicy} objects.
 *
 * All String-typed fields are kept as raw attribute text and resolved
 * (including property placeholders) via {@link CamelContextHelper} at
 * {@link #getObject()} time, so the XML may contain e.g. {{...}} expressions.
 *
 * @version
 */
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class AbstractCamelRedeliveryPolicyFactoryBean extends AbstractCamelFactoryBean<RedeliveryPolicy> {

    // Fields are bound to XML attributes by name via JAXB (FIELD access).
    @XmlAttribute
    private String maximumRedeliveries;
    @XmlAttribute
    private String redeliveryDelay;
    @XmlAttribute
    private String asyncDelayedRedelivery;
    @XmlAttribute
    private String backOffMultiplier;
    @XmlAttribute
    private String useExponentialBackOff;
    @XmlAttribute
    private String collisionAvoidanceFactor;
    @XmlAttribute
    private String useCollisionAvoidance;
    @XmlAttribute
    private String maximumRedeliveryDelay;
    @XmlAttribute
    private LoggingLevel retriesExhaustedLogLevel;
    @XmlAttribute
    private LoggingLevel retryAttemptedLogLevel;
    @XmlAttribute
    private String logRetryAttempted;
    @XmlAttribute
    private String logStackTrace;
    @XmlAttribute
    private String logRetryStackTrace;
    @XmlAttribute
    private String logHandled;
    @XmlAttribute
    private String logContinued;
    @XmlAttribute
    private String logExhausted;
    @XmlAttribute
    private String disableRedelivery;
    @XmlAttribute
    private String delayPattern;
    @XmlAttribute
    private String allowRedeliveryWhileStopping;
    @XmlAttribute
    private String exchangeFormatterRef;

    /**
     * Builds a new {@link RedeliveryPolicy}, copying across only the
     * attributes that were set, parsing each String through the
     * CamelContext so placeholders are resolved.
     */
    public RedeliveryPolicy getObject() throws Exception {
        RedeliveryPolicy answer = new RedeliveryPolicy();
        CamelContext context = getCamelContext();

        // copy across the properties - if they are set
        if (maximumRedeliveries != null) {
            answer.setMaximumRedeliveries(CamelContextHelper.parseInteger(context, maximumRedeliveries));
        }
        if (redeliveryDelay != null) {
            answer.setRedeliveryDelay(CamelContextHelper.parseLong(context, redeliveryDelay));
        }
        if (asyncDelayedRedelivery != null) {
            // boolean flag toggles the feature rather than being set directly
            if (CamelContextHelper.parseBoolean(context, asyncDelayedRedelivery)) {
                answer.asyncDelayedRedelivery();
            }
        }
        if (retriesExhaustedLogLevel != null) {
            answer.setRetriesExhaustedLogLevel(retriesExhaustedLogLevel);
        }
        if (retryAttemptedLogLevel != null) {
            answer.setRetryAttemptedLogLevel(retryAttemptedLogLevel);
        }
        if (backOffMultiplier != null) {
            answer.setBackOffMultiplier(CamelContextHelper.parseDouble(context, backOffMultiplier));
        }
        if (useExponentialBackOff != null) {
            answer.setUseExponentialBackOff(CamelContextHelper.parseBoolean(context, useExponentialBackOff));
        }
        if (collisionAvoidanceFactor != null) {
            answer.setCollisionAvoidanceFactor(CamelContextHelper.parseDouble(context, collisionAvoidanceFactor));
        }
        if (useCollisionAvoidance != null) {
            answer.setUseCollisionAvoidance(CamelContextHelper.parseBoolean(context, useCollisionAvoidance));
        }
        if (maximumRedeliveryDelay != null) {
            answer.setMaximumRedeliveryDelay(CamelContextHelper.parseLong(context, maximumRedeliveryDelay));
        }
        if (logStackTrace != null) {
            answer.setLogStackTrace(CamelContextHelper.parseBoolean(context, logStackTrace));
        }
        if (logRetryStackTrace != null) {
            answer.setLogRetryStackTrace(CamelContextHelper.parseBoolean(context, logRetryStackTrace));
        }
        if (logHandled != null) {
            answer.setLogHandled(CamelContextHelper.parseBoolean(context, logHandled));
        }
        if (logContinued != null) {
            answer.setLogContinued(CamelContextHelper.parseBoolean(context, logContinued));
        }
        if (logRetryAttempted != null) {
            answer.setLogRetryAttempted(CamelContextHelper.parseBoolean(context, logRetryAttempted));
        }
        if (logExhausted != null) {
            answer.setLogExhausted(CamelContextHelper.parseBoolean(context, logExhausted));
        }
        if (disableRedelivery != null) {
            // "disable" is modelled as zero redeliveries
            if (CamelContextHelper.parseBoolean(context, disableRedelivery)) {
                answer.setMaximumRedeliveries(0);
            }
        }
        if (delayPattern != null) {
            answer.setDelayPattern(CamelContextHelper.parseText(context, delayPattern));
        }
        if (allowRedeliveryWhileStopping != null) {
            answer.setAllowRedeliveryWhileStopping(CamelContextHelper.parseBoolean(context, allowRedeliveryWhileStopping));
        }
        if (exchangeFormatterRef != null) {
            answer.setExchangeFormatterRef(exchangeFormatterRef);
        }

        return answer;
    }

    public Class<RedeliveryPolicy> getObjectType() {
        return RedeliveryPolicy.class;
    }

    // Plain JavaBean accessors below; required for JAXB/Spring property binding.

    public String getMaximumRedeliveries() {
        return maximumRedeliveries;
    }

    public void setMaximumRedeliveries(String maximumRedeliveries) {
        this.maximumRedeliveries = maximumRedeliveries;
    }

    public String getRedeliveryDelay() {
        return redeliveryDelay;
    }

    public void setRedeliveryDelay(String redeliveryDelay) {
        this.redeliveryDelay = redeliveryDelay;
    }

    public String getAsyncDelayedRedelivery() {
        return asyncDelayedRedelivery;
    }

    public void setAsyncDelayedRedelivery(String asyncDelayedRedelivery) {
        this.asyncDelayedRedelivery = asyncDelayedRedelivery;
    }

    public String getBackOffMultiplier() {
        return backOffMultiplier;
    }

    public void setBackOffMultiplier(String backOffMultiplier) {
        this.backOffMultiplier = backOffMultiplier;
    }

    public String getUseExponentialBackOff() {
        return useExponentialBackOff;
    }

    public void setUseExponentialBackOff(String useExponentialBackOff) {
        this.useExponentialBackOff = useExponentialBackOff;
    }

    public String getCollisionAvoidanceFactor() {
        return collisionAvoidanceFactor;
    }

    public void setCollisionAvoidanceFactor(String collisionAvoidanceFactor) {
        this.collisionAvoidanceFactor = collisionAvoidanceFactor;
    }

    public String getUseCollisionAvoidance() {
        return useCollisionAvoidance;
    }

    public void setUseCollisionAvoidance(String useCollisionAvoidance) {
        this.useCollisionAvoidance = useCollisionAvoidance;
    }

    public String getMaximumRedeliveryDelay() {
        return maximumRedeliveryDelay;
    }

    public void setMaximumRedeliveryDelay(String maximumRedeliveryDelay) {
        this.maximumRedeliveryDelay = maximumRedeliveryDelay;
    }

    public LoggingLevel getRetriesExhaustedLogLevel() {
        return retriesExhaustedLogLevel;
    }

    public void setRetriesExhaustedLogLevel(LoggingLevel retriesExhaustedLogLevel) {
        this.retriesExhaustedLogLevel = retriesExhaustedLogLevel;
    }

    public LoggingLevel getRetryAttemptedLogLevel() {
        return retryAttemptedLogLevel;
    }

    public void setRetryAttemptedLogLevel(LoggingLevel retryAttemptedLogLevel) {
        this.retryAttemptedLogLevel = retryAttemptedLogLevel;
    }

    public String getLogRetryAttempted() {
        return logRetryAttempted;
    }

    public void setLogRetryAttempted(String logRetryAttempted) {
        this.logRetryAttempted = logRetryAttempted;
    }

    public String getLogStackTrace() {
        return logStackTrace;
    }

    public void setLogStackTrace(String logStackTrace) {
        this.logStackTrace = logStackTrace;
    }

    public String getLogRetryStackTrace() {
        return logRetryStackTrace;
    }

    public void setLogRetryStackTrace(String logRetryStackTrace) {
        this.logRetryStackTrace = logRetryStackTrace;
    }

    public String getLogHandled() {
        return logHandled;
    }

    public void setLogHandled(String logHandled) {
        this.logHandled = logHandled;
    }

    public String getLogContinued() {
        return logContinued;
    }

    public void setLogContinued(String logContinued) {
        this.logContinued = logContinued;
    }

    public String getLogExhausted() {
        return logExhausted;
    }

    public void setLogExhausted(String logExhausted) {
        this.logExhausted = logExhausted;
    }

    public String getDisableRedelivery() {
        return disableRedelivery;
    }

    public void setDisableRedelivery(String disableRedelivery) {
        this.disableRedelivery = disableRedelivery;
    }

    public String getDelayPattern() {
        return delayPattern;
    }

    public void setDelayPattern(String delayPattern) {
        this.delayPattern = delayPattern;
    }

    public String getAllowRedeliveryWhileStopping() {
        return allowRedeliveryWhileStopping;
    }

    public void setAllowRedeliveryWhileStopping(String allowRedeliveryWhileStopping) {
        this.allowRedeliveryWhileStopping = allowRedeliveryWhileStopping;
    }

    public String getExchangeFormatterRef() {
        return exchangeFormatterRef;
    }

    public void setExchangeFormatterRef(String exchangeFormatterRef) {
        this.exchangeFormatterRef = exchangeFormatterRef;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.kubernetes.producer;

import java.util.Map;

import io.fabric8.kubernetes.api.model.DoneableService;
import io.fabric8.kubernetes.api.model.EditableService;
import io.fabric8.kubernetes.api.model.Service;
import io.fabric8.kubernetes.api.model.ServiceBuilder;
import io.fabric8.kubernetes.api.model.ServiceList;
import io.fabric8.kubernetes.api.model.ServiceSpec;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.dsl.ClientMixedOperation;
import io.fabric8.kubernetes.client.dsl.ClientNonNamespaceOperation;
import io.fabric8.kubernetes.client.dsl.ClientOperation;
import io.fabric8.kubernetes.client.dsl.ClientResource;

import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesEndpoint;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Producer that performs Kubernetes Service operations (list, list by
 * labels, get, create, delete).  The operation is taken from the endpoint
 * configuration, or — when not configured — from the
 * KUBERNETES_OPERATION message header.
 */
public class KubernetesServicesProducer extends DefaultProducer {

    private static final Logger LOG = LoggerFactory
            .getLogger(KubernetesServicesProducer.class);

    public KubernetesServicesProducer(KubernetesEndpoint endpoint) {
        super(endpoint);
    }

    @Override
    public KubernetesEndpoint getEndpoint() {
        return (KubernetesEndpoint) super.getEndpoint();
    }

    @Override
    public void process(Exchange exchange) throws Exception {
        String operation;

        // Endpoint-configured operation wins; otherwise fall back to header.
        // NOTE(review): if neither is set, operation is null and the switch
        // below throws NullPointerException — confirm upstream validation.
        if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration()
                .getOperation())) {
            operation = exchange.getIn().getHeader(
                    KubernetesConstants.KUBERNETES_OPERATION, String.class);
        } else {
            operation = getEndpoint().getKubernetesConfiguration()
                    .getOperation();
        }

        switch (operation) {

        case KubernetesOperations.LIST_SERVICES_OPERATION:
            doList(exchange, operation);
            break;

        case KubernetesOperations.LIST_SERVICES_BY_LABELS_OPERATION:
            doListServiceByLabels(exchange, operation);
            break;

        case KubernetesOperations.GET_SERVICE_OPERATION:
            doGetService(exchange, operation);
            break;

        case KubernetesOperations.CREATE_SERVICE_OPERATION:
            doCreateService(exchange, operation);
            break;

        case KubernetesOperations.DELETE_SERVICE_OPERATION:
            doDeleteService(exchange, operation);
            break;

        default:
            throw new IllegalArgumentException("Unsupported operation "
                    + operation);
        }
    }

    /**
     * Lists all services, scoped to the namespace header when present.
     */
    protected void doList(Exchange exchange, String operation) throws Exception {
        ServiceList servicesList = null;
        String namespaceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (!ObjectHelper.isEmpty(namespaceName)) {
            servicesList = getEndpoint().getKubernetesClient().services()
                    .inNamespace(namespaceName).list();
        } else {
            servicesList = getEndpoint().getKubernetesClient().services()
                    .list();
        }
        exchange.getOut().setBody(servicesList.getItems());
    }

    /**
     * Lists services filtered by the labels supplied in the
     * KUBERNETES_SERVICE_LABELS header.
     */
    protected void doListServiceByLabels(Exchange exchange, String operation)
            throws Exception {
        ServiceList servicesList = null;
        // NOTE(review): assumes the labels header is present; a missing
        // header yields labels == null and an NPE on entrySet() — confirm
        // callers always set it.
        Map<String, String> labels = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_LABELS, Map.class);
        String namespaceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (!ObjectHelper.isEmpty(namespaceName)) {
            ClientNonNamespaceOperation<Service, ServiceList, DoneableService, ClientResource<Service, DoneableService>> services;
            services = getEndpoint().getKubernetesClient().services()
                    .inNamespace(namespaceName);
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): the return value of withLabel() is discarded;
                // this only works if this client version mutates the operation
                // in place — otherwise the label filter is never applied.
                // Verify against the fabric8 kubernetes-client version in use.
                services.withLabel(entry.getKey(), entry.getValue());
            }
            servicesList = services.list();
        } else {
            ClientMixedOperation<Service, ServiceList, DoneableService, ClientResource<Service, DoneableService>> services;
            services = getEndpoint().getKubernetesClient().services();
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): same discarded withLabel() result as above.
                services.withLabel(entry.getKey(), entry.getValue());
            }
            servicesList = services.list();
        }
        exchange.getOut().setBody(servicesList.getItems());
    }

    /**
     * Gets a single service; requires both the service name and the
     * namespace headers.
     */
    protected void doGetService(Exchange exchange, String operation)
            throws Exception {
        Service service = null;
        String serviceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (ObjectHelper.isEmpty(serviceName)) {
            LOG.error("Get a specific service require specify a service name");
            throw new IllegalArgumentException(
                    "Get a specific service require specify a service name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Get a specific service require specify a namespace name");
            throw new IllegalArgumentException(
                    "Get a specific service require specify a namespace name");
        }
        service = getEndpoint().getKubernetesClient().services()
                .inNamespace(namespaceName).withName(serviceName).get();
        exchange.getOut().setBody(service);
    }

    /**
     * Creates a service from the name, namespace, spec (and optional
     * labels) headers.
     */
    protected void doCreateService(Exchange exchange, String operation)
            throws Exception {
        Service service = null;
        String serviceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        ServiceSpec serviceSpec = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_SPEC, ServiceSpec.class);
        if (ObjectHelper.isEmpty(serviceName)) {
            LOG.error("Create a specific service require specify a service name");
            throw new IllegalArgumentException(
                    "Create a specific service require specify a service name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Create a specific service require specify a namespace name");
            throw new IllegalArgumentException(
                    "Create a specific service require specify a namespace name");
        }
        if (ObjectHelper.isEmpty(serviceSpec)) {
            LOG.error("Create a specific service require specify a service spec bean");
            throw new IllegalArgumentException(
                    "Create a specific service require specify a service spec bean");
        }
        Map<String, String> labels = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_LABELS, Map.class);
        EditableService serviceCreating = new ServiceBuilder()
                .withNewMetadata().withName(serviceName).withLabels(labels)
                .endMetadata().withSpec(serviceSpec).build();
        service = getEndpoint().getKubernetesClient().services()
                .inNamespace(namespaceName).create(serviceCreating);
        exchange.getOut().setBody(service);
    }

    /**
     * Deletes a service; requires both the service name and the
     * namespace headers.  Body is set to the deletion result flag.
     */
    protected void doDeleteService(Exchange exchange, String operation)
            throws Exception {
        String serviceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_SERVICE_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(
                KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (ObjectHelper.isEmpty(serviceName)) {
            LOG.error("Delete a specific service require specify a service name");
            throw new IllegalArgumentException(
                    "Delete a specific service require specify a service name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Delete a specific service require specify a namespace name");
            throw new IllegalArgumentException(
                    "Delete a specific service require specify a namespace name");
        }
        boolean serviceDeleted = getEndpoint().getKubernetesClient().services()
                .inNamespace(namespaceName).withName(serviceName).delete();
        exchange.getOut().setBody(serviceDeleted);
    }
}
package org.wikidata.wdtk.examples;

/*
 * #%L
 * Wikidata Toolkit Examples
 * %%
 * Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocumentProcessor;
import org.wikidata.wdtk.datamodel.interfaces.EntityIdValue;
import org.wikidata.wdtk.datamodel.interfaces.ItemDocument;
import org.wikidata.wdtk.datamodel.interfaces.ItemIdValue;
import org.wikidata.wdtk.datamodel.interfaces.MonolingualTextValue;
import org.wikidata.wdtk.datamodel.interfaces.PropertyDocument;
import org.wikidata.wdtk.datamodel.interfaces.PropertyIdValue;
import org.wikidata.wdtk.datamodel.interfaces.QuantityValue;
import org.wikidata.wdtk.datamodel.interfaces.SiteLink;
import org.wikidata.wdtk.datamodel.interfaces.Statement;
import org.wikidata.wdtk.datamodel.interfaces.StatementGroup;
import org.wikidata.wdtk.datamodel.interfaces.StringValue;
import org.wikidata.wdtk.datamodel.interfaces.TimeValue;
import org.wikidata.wdtk.datamodel.interfaces.Value;
import org.wikidata.wdtk.datamodel.interfaces.ValueSnak;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Example dump processor that counts, for every item type (the target of an
 * "instance of" / P31 statement), how many items in a Wikidata dump have that
 * type, and prints the resulting histogram sorted by count.
 */
public class TypeHistogram implements EntityDocumentProcessor {

    // Number of item documents processed so far; used for periodic status output.
    int nprocessed = 0;

    // Maps a type's entity id (e.g. "Q5") to the number of items of that type.
    TreeMap<String, Integer> histogram = Maps.newTreeMap();

    /**
     * Returns the entries of {@code passedMap} as a {@link LinkedHashMap}
     * ordered by ascending value; entries with equal values are ordered by
     * ascending key. This reproduces the historical behaviour of pairing the
     * sorted value list with the smallest remaining matching key, but in
     * O(n log n) instead of O(n^2).
     *
     * @param passedMap the map whose entries should be sorted
     * @return a new insertion-ordered map sorted by (value, key)
     */
    public LinkedHashMap<String, Integer> sortHashMapByValuesD(TreeMap<String, Integer> passedMap) {
        List<Map.Entry<String, Integer>> entries =
                new ArrayList<Map.Entry<String, Integer>>(passedMap.entrySet());
        Collections.sort(entries, new Comparator<Map.Entry<String, Integer>>() {
            @Override
            public int compare(Map.Entry<String, Integer> a, Map.Entry<String, Integer> b) {
                int byValue = a.getValue().compareTo(b.getValue());
                return byValue != 0 ? byValue : a.getKey().compareTo(b.getKey());
            }
        });
        LinkedHashMap<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
        for (Map.Entry<String, Integer> entry : entries) {
            sortedMap.put(entry.getKey(), entry.getValue());
        }
        return sortedMap;
    }

    /**
     * Creates a new histogram processor with an empty histogram.
     */
    public TypeHistogram() {
        // nothing to initialize beyond the field defaults
    }

    /**
     * Runs the example: processes all entities from a Wikidata dump and writes
     * the final type histogram.
     *
     * @param args ignored
     * @throws IOException if writing the result file fails
     */
    public static void main(String[] args) throws IOException {
        ExampleHelpers.configureLogging();
        TypeHistogram processor = new TypeHistogram();
        ExampleHelpers.processEntitiesFromWikidataDump(processor);
        processor.writeFinalResults();
    }

    @Override
    public void processItemDocument(ItemDocument itemDocument) {
        // Print status once in a while
        if (this.nprocessed % 100000 == 0) {
            printStatus();
        }
        this.nprocessed++;

        for (StatementGroup sg : itemDocument.getStatementGroups()) {
            switch (sg.getProperty().getId()) {
            case "P31": { // P31 is "instance of"
                // Count every item-valued main snak of an "instance of" statement.
                for (Statement s : sg.getStatements()) {
                    if (s.getClaim().getMainSnak() instanceof ValueSnak) {
                        Value v = ((ValueSnak) s.getClaim().getMainSnak()).getValue();
                        if (v instanceof ItemIdValue) {
                            String type = ((ItemIdValue) v).getId();
                            Integer count = histogram.get(type);
                            histogram.put(type, count == null ? 1 : count + 1);
                        }
                    }
                }
                break;
            }
            }
        }
    }

    @Override
    public void processPropertyDocument(PropertyDocument propertyDocument) {
        // Property documents carry no P31 data we care about; intentionally ignored.
    }

    /**
     * Prints the current histogram, sorted by count, to the system output.
     */
    private void printStatus() {
        LinkedHashMap<String, Integer> smap = sortHashMapByValuesD(histogram);
        for (Map.Entry<String, Integer> entry : smap.entrySet()) {
            System.out.println(entry.getKey() + " " + entry.getValue());
        }
    }

    /**
     * Prints the final histogram and also writes it to
     * {@code type_histogram.txt} via {@link ExampleHelpers}.
     */
    public void writeFinalResults() {
        printStatus();

        try (PrintStream out = new PrintStream(
                ExampleHelpers.openExampleFileOuputStream("type_histogram.txt"))) {
            LinkedHashMap<String, Integer> smap = sortHashMapByValuesD(histogram);
            for (Map.Entry<String, Integer> entry : smap.entrySet()) {
                out.println(entry.getKey() + " " + entry.getValue());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Checks if the given group of statements contains the given value as the
     * value of a main snak of some statement.
     *
     * @param statementGroup
     *            the statement group to scan
     * @param value
     *            the value to scan for
     * @return true if value was found
     */
    private boolean containsValue(StatementGroup statementGroup, Value value) {
        for (Statement s : statementGroup.getStatements()) {
            if (s.getClaim().getMainSnak() instanceof ValueSnak) {
                Value v = ((ValueSnak) s.getClaim().getMainSnak()).getValue();
                if (value.equals(v)) {
                    return true;
                }
            }
        }
        return false;
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.openapi.vcs;

import com.intellij.openapi.diff.impl.patch.formove.FilePathComparator;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.UnnamedConfigurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vcs.annotate.AnnotationProvider;
import com.intellij.openapi.vcs.changes.ChangeProvider;
import com.intellij.openapi.vcs.changes.CommitExecutor;
import com.intellij.openapi.vcs.changes.LocalChangeList;
import com.intellij.openapi.vcs.checkin.CheckinEnvironment;
import com.intellij.openapi.vcs.diff.DiffProvider;
import com.intellij.openapi.vcs.diff.RevisionSelector;
import com.intellij.openapi.vcs.history.VcsHistoryProvider;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vcs.merge.MergeProvider;
import com.intellij.openapi.vcs.rollback.RollbackEnvironment;
import com.intellij.openapi.vcs.update.UpdateEnvironment;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ThreeState;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.ui.VcsSynchronousProgressWrapper;
import org.jetbrains.annotations.CalledInAwt;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;

/**
 * The base class for a version control system integrated with IDEA.
 *
 * @see ProjectLevelVcsManager
 */
public abstract class AbstractVcs<ComList extends CommittedChangeList> extends StartedActivated {
  @NonNls protected static final String ourIntegerPattern = "\\d+";

  @NotNull protected final Project myProject;
  private final String myName;
  private final VcsKey myKey;

  private VcsShowSettingOption myUpdateOption;
  private VcsShowSettingOption myStatusOption;

  // Environments are set once via the setters below; redefinition is rejected.
  private CheckinEnvironment myCheckinEnvironment;
  private UpdateEnvironment myUpdateEnvironment;
  private RollbackEnvironment myRollbackEnvironment;

  public AbstractVcs(@NotNull Project project, final String name) {
    super(project);

    myProject = project;
    myName = name;
    myKey = new VcsKey(myName);
  }

  // acts as adapter
  @Override
  protected void start() throws VcsException {
  }

  @Override
  protected void shutdown() throws VcsException {
  }

  @Override
  protected void activate() {
  }

  @Override
  protected void deactivate() {
  }

  @NonNls
  public final String getName() {
    return myName;
  }

  @NonNls
  public abstract String getDisplayName();

  public abstract Configurable getConfigurable();

  @Nullable
  public TransactionProvider getTransactionProvider() {
    return null;
  }

  @Nullable
  public ChangeProvider getChangeProvider() {
    return null;
  }

  public final VcsConfiguration getConfiguration() {
    return VcsConfiguration.getInstance(myProject);
  }

  /**
   * Returns the interface for performing check out / edit file operations.
   *
   * @return the interface implementation, or null if none is provided.
   */
  @Nullable
  public EditFileProvider getEditFileProvider() {
    return null;
  }

  public boolean markExternalChangesAsUpToDate() {
    return false;
  }

  /**
   * creates the object for performing checkin / commit / submit operations.
   */
  @Nullable
  protected CheckinEnvironment createCheckinEnvironment() {
    return null;
  }

  /**
   * !!! concrete VCS should define {@link #createCheckinEnvironment} method
   * this method wraps created environment with a listener
   *
   * Returns the interface for performing checkin / commit / submit operations.
   *
   * @return the checkin interface, or null if checkins are not supported by the VCS.
   */
  @Nullable
  public CheckinEnvironment getCheckinEnvironment() {
    return myCheckinEnvironment;
  }

  /**
   * Returns the interface for performing revert / rollback operations.
   */
  @Nullable
  protected RollbackEnvironment createRollbackEnvironment() {
    return null;
  }

  /**
   * !!! concrete VCS should define {@link #createRollbackEnvironment()} method
   * this method wraps created environment with a listener
   *
   * @return the rollback interface, or null if rollbacks are not supported by the VCS.
   */
  @Nullable
  public RollbackEnvironment getRollbackEnvironment() {
    return myRollbackEnvironment;
  }

  @Nullable
  public VcsHistoryProvider getVcsHistoryProvider() {
    return null;
  }

  @Nullable
  public VcsHistoryProvider getVcsBlockHistoryProvider() {
    return null;
  }

  public String getMenuItemText() {
    return getDisplayName();
  }

  /**
   * Returns the interface for performing update/sync operations.
   */
  @Nullable
  protected UpdateEnvironment createUpdateEnvironment() {
    return null;
  }

  /**
   * !!! concrete VCS should define {@link #createUpdateEnvironment()} method
   * this method wraps created environment with a listener
   *
   * @return the update interface, or null if the updates are not supported by the VCS.
   */
  @Nullable
  public UpdateEnvironment getUpdateEnvironment() {
    return myUpdateEnvironment;
  }

  /**
   * Returns true if the specified file path is located under a directory which is managed by this VCS.
   * This method is called only for directories which are mapped to this VCS in the project configuration.
   *
   * @param filePath the path to check.
   * @return true if the path is managed by this VCS, false otherwise.
   */
  public boolean fileIsUnderVcs(FilePath filePath) {
    return true;
  }

  /**
   * Returns true if the specified file path represents a file which exists in the VCS repository (is neither
   * unversioned nor scheduled for addition).
   * This method is called only for directories which are mapped to this VCS in the project configuration.
   *
   * @param path the path to check.
   * @return true if the corresponding file exists in the repository, false otherwise.
   */
  public boolean fileExistsInVcs(FilePath path) {
    final VirtualFile virtualFile = path.getVirtualFile();
    if (virtualFile != null) {
      final FileStatus fileStatus = FileStatusManager.getInstance(myProject).getStatus(virtualFile);
      return fileStatus != FileStatus.UNKNOWN && fileStatus != FileStatus.ADDED;
    }
    return true;
  }

  /**
   * This method is called when user invokes "Enable VCS Integration" and selects a particular VCS.
   * By default it sets up a single mapping {@code <Project> -> selected VCS}.
   */
  @CalledInAwt
  public void enableIntegration() {
    ProjectLevelVcsManager vcsManager = ProjectLevelVcsManager.getInstance(myProject);
    if (vcsManager != null) {
      vcsManager.setDirectoryMappings(Collections.singletonList(new VcsDirectoryMapping("", getName())));
    }
  }

  /**
   * Invoked when a changelist is deleted explicitly by user or implicitly (e.g. after default changelist switch
   * when the previous one was empty).
   * @param list change list that's about to be removed
   * @param explicitly whether it's a result of explicit Delete action, or just after switching the active changelist.
   * @return UNSURE if the VCS has nothing to say about this changelist.
   * YES or NO if the changelist has to be removed or not, and no further confirmations are needed about this changelist
   * (in particular, the VCS can show a confirmation to the user by itself)
   */
  @CalledInAwt
  @NotNull
  public ThreeState mayRemoveChangeList(@NotNull LocalChangeList list, boolean explicitly) {
    return ThreeState.UNSURE;
  }

  public boolean isTrackingUnchangedContent() {
    return false;
  }

  public static boolean fileInVcsByFileStatus(@NotNull Project project, @NotNull FilePath path) {
    VirtualFile file = path.getVirtualFile();
    return file == null || fileInVcsByFileStatus(project, file);
  }

  public static boolean fileInVcsByFileStatus(@NotNull Project project, @NotNull VirtualFile file) {
    FileStatus status = FileStatusManager.getInstance(project).getStatus(file);
    return status != FileStatus.UNKNOWN && status != FileStatus.ADDED && status != FileStatus.IGNORED;
  }

  /**
   * Returns the interface for performing "check status" operations (operations which show the differences between
   * the local working copy state and the latest server state).
   *
   * @return the status interface, or null if the check status operation is not supported or required by the VCS.
   */
  @Nullable
  public UpdateEnvironment getStatusEnvironment() {
    return null;
  }

  @Nullable
  public AnnotationProvider getAnnotationProvider() {
    return null;
  }

  @Nullable
  public DiffProvider getDiffProvider() {
    return null;
  }

  public VcsShowSettingOption getUpdateOptions() {
    return myUpdateOption;
  }

  public VcsShowSettingOption getStatusOptions() {
    return myStatusOption;
  }

  // Standard options exist only for the environments the VCS actually provides.
  public void loadSettings() {
    final ProjectLevelVcsManager vcsManager = ProjectLevelVcsManager.getInstance(myProject);
    if (getUpdateEnvironment() != null) {
      myUpdateOption = vcsManager.getStandardOption(VcsConfiguration.StandardOption.UPDATE, this);
    }
    if (getStatusEnvironment() != null) {
      myStatusOption = vcsManager.getStandardOption(VcsConfiguration.StandardOption.STATUS, this);
    }
  }

  /**
   * @return the file statuses this VCS can provide, or null if it provides none.
   */
  @Nullable
  public FileStatus[] getProvidedStatuses() {
    return null;
  }

  /**
   * Returns the interface for selecting file version numbers.
   *
   * @return the revision selector implementation, or null if none is provided.
   * @since 5.0.2
   */
  @Nullable
  public RevisionSelector getRevisionSelector() {
    return null;
  }

  /**
   * Returns the interface for performing integrate operations (merging changes made in another branch of
   * the project into the current working copy).
   *
   * @return the update interface, or null if the integrate operations are not supported by the VCS.
   */
  @Nullable
  public UpdateEnvironment getIntegrateEnvironment() {
    return null;
  }

  @Nullable
  public CommittedChangesProvider getCommittedChangesProvider() {
    return null;
  }

  @Nullable
  public final CachingCommittedChangesProvider getCachingCommittedChangesProvider() {
    CommittedChangesProvider provider = getCommittedChangesProvider();
    if (provider instanceof CachingCommittedChangesProvider) {
      return (CachingCommittedChangesProvider)provider;
    }
    return null;
  }

  /**
   * For some version controls (like Git) the revision parsing is dependent
   * on the specific repository instance since the revision number
   * returned from this method is later used for comparison information.
   * By default, this method invokes {@link #parseRevisionNumber(String)}.
   * The client code should invoke this method, if it expects ordering information
   * from revision numbers.
   *
   * @param revisionNumberString the string to be parsed
   * @param path                 the path for which revision number is queried
   * @return the parsed revision number
   */
  @Nullable
  public VcsRevisionNumber parseRevisionNumber(String revisionNumberString, FilePath path) throws VcsException {
    return parseRevisionNumber(revisionNumberString);
  }

  @Nullable
  public VcsRevisionNumber parseRevisionNumber(String revisionNumberString) throws VcsException {
    return null;
  }

  /**
   * @return null if does not support revision parsing
   */
  @Nullable
  public String getRevisionPattern() {
    return null;
  }

  /**
   * Checks if the specified directory is managed by this version control system (regardless of the
   * project VCS configuration). For example, for CVS this checks the presence of "CVS" admin directories.
   * This method is used for VCS autodetection during initial project creation and VCS configuration.
   *
   * @param dir the directory to check.
   * @return {@code true} if directory is managed by this VCS
   */
  public boolean isVersionedDirectory(VirtualFile dir) {
    return false;
  }

  /**
   * Returns the configurable to be shown in the VCS directory mapping dialog which should be displayed
   * for configuring VCS-specific settings for the specified root, or null if no such configuration is required.
   * The VCS-specific settings are stored in {@link VcsDirectoryMapping#getRootSettings()}.
   *
   * @param mapping the mapping being configured
   * @return the configurable instance, or null if no configuration is required.
   */
  @Nullable
  public UnnamedConfigurable getRootConfigurable(VcsDirectoryMapping mapping) {
    return null;
  }

  @Nullable
  public VcsRootSettings createEmptyVcsRootSettings() {
    return null;
  }

  @Nullable
  public RootsConvertor getCustomConvertor() {
    return null;
  }

  public interface RootsConvertor {
    @NotNull
    List<VirtualFile> convertRoots(@NotNull List<VirtualFile> result);
  }

  /**
   * Returns the implementation of the merge provider which is used to load the revisions to be merged
   * for a particular file.
   *
   * @return the merge provider implementation, or null if the VCS doesn't support merge operations.
   */
  @Nullable
  public MergeProvider getMergeProvider() {
    return null;
  }

  public boolean allowsNestedRoots() {
    return false;
  }

  // Removes entries that are descendants of other entries (keeps only the topmost roots).
  @NotNull
  public <S> List<S> filterUniqueRoots(@NotNull List<S> in, @NotNull Function<S, VirtualFile> convertor) {
    new FilterDescendantVirtualFileConvertible<>(convertor, FilePathComparator.getInstance()).doFilter(in);
    return in;
  }

  @NotNull
  public static <S> List<S> filterUniqueRootsDefault(@NotNull List<S> in, @NotNull Function<S, VirtualFile> convertor) {
    new FilterDescendantVirtualFileConvertible<>(convertor, FilePathComparator.getInstance()).doFilter(in);
    return in;
  }

  @Nullable
  public VcsExceptionsHotFixer getVcsExceptionsHotFixer() {
    return null;
  }

  @NotNull
  public Project getProject() {
    return myProject;
  }

  protected static VcsKey createKey(final String name) {
    return new VcsKey(name);
  }

  public final VcsKey getKeyInstanceMethod() {
    return myKey;
  }

  public VcsType getType() {
    return VcsType.centralized;
  }

  @Nullable
  protected VcsOutgoingChangesProvider<ComList> getOutgoingProviderImpl() {
    return null;
  }

  // Outgoing changes make sense only for distributed VCSs.
  @Nullable
  public final VcsOutgoingChangesProvider<ComList> getOutgoingChangesProvider() {
    return VcsType.centralized.equals(getType()) ? null : getOutgoingProviderImpl();
  }

  public RemoteDifferenceStrategy getRemoteDifferenceStrategy() {
    return RemoteDifferenceStrategy.ASK_LATEST_REVISION;
  }

  public boolean areDirectoriesVersionedItems() {
    return false;
  }

  @Nullable
  protected TreeDiffProvider getTreeDiffProviderImpl() {
    return null;
  }

  @Nullable
  public TreeDiffProvider getTreeDiffProvider() {
    final RemoteDifferenceStrategy strategy = getRemoteDifferenceStrategy();
    return RemoteDifferenceStrategy.ASK_LATEST_REVISION.equals(strategy) ? null : getTreeDiffProviderImpl();
  }

  public List<CommitExecutor> getCommitExecutors() {
    return Collections.emptyList();
  }

  /**
   * Can be temporarily forbidden, for instance, when authorization credentials are wrong - to
   * avoid repeating wrong credentials passing (in some cases it can produce user's account blocking)
   */
  public boolean isVcsBackgroundOperationsAllowed(final VirtualFile root) {
    return true;
  }

  public boolean allowsRemoteCalls(@NotNull final VirtualFile file) {
    return true;
  }

  public void setCheckinEnvironment(CheckinEnvironment checkinEnvironment) {
    if (myCheckinEnvironment != null) throw new IllegalStateException("Attempt to redefine checkin environment");
    myCheckinEnvironment = checkinEnvironment;
  }

  public void setUpdateEnvironment(UpdateEnvironment updateEnvironment) {
    if (myUpdateEnvironment != null) throw new IllegalStateException("Attempt to redefine update environment");
    myUpdateEnvironment = updateEnvironment;
  }

  public void setRollbackEnvironment(RollbackEnvironment rollbackEnvironment) {
    if (myRollbackEnvironment != null) throw new IllegalStateException("Attempt to redefine rollback environment");
    myRollbackEnvironment = rollbackEnvironment;
  }

  public void setupEnvironments() {
    setCheckinEnvironment(createCheckinEnvironment());
    setUpdateEnvironment(createUpdateEnvironment());
    setRollbackEnvironment(createRollbackEnvironment());
  }

  /**
   * Loads the committed change list for the given file and revision, showing a
   * synchronous progress indicator while the provider is queried.
   *
   * @return the change list, or null if loading failed or was cancelled
   */
  @Nullable
  public CommittedChangeList loadRevisions(final VirtualFile vf, final VcsRevisionNumber number) {
    final CommittedChangeList[] list = new CommittedChangeList[1];
    final ThrowableRunnable<VcsException> runnable = () -> {
      final Pair<CommittedChangeList, FilePath> pair = getCommittedChangesProvider().getOneList(vf, number);
      if (pair != null) {
        list[0] = pair.getFirst();
      }
    };
    return VcsSynchronousProgressWrapper.wrap(runnable, getProject(), "Load revision contents") ? list[0] : null;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    // Identity of a VCS is fully determined by its key.
    return myKey.equals(((AbstractVcs)o).myKey);
  }

  @Override
  public int hashCode() {
    return myKey.hashCode();
  }

  public boolean fileListenerIsSynchronous() {
    return true;
  }

  public boolean arePartialChangelistsSupported() {
    return false;
  }

  public CheckoutProvider getCheckoutProvider() {
    return null;
  }

  @Override
  public String toString() {
    return getName();
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.cache.Cache; import javax.cache.processor.EntryProcessor; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.distributed.dht.preloader.IgniteDhtDemandedPartitionsMap; import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition; import org.apache.ignite.internal.processors.cache.mvcc.MvccSnapshot; import org.apache.ignite.internal.processors.cache.mvcc.MvccVersion; import org.apache.ignite.internal.processors.cache.persistence.CacheDataRow; import org.apache.ignite.internal.processors.cache.persistence.CacheSearchRow; import org.apache.ignite.internal.processors.cache.persistence.DataRowCacheAware; import org.apache.ignite.internal.processors.cache.persistence.RootPage; import org.apache.ignite.internal.processors.cache.persistence.RowStore; import org.apache.ignite.internal.processors.cache.persistence.freelist.SimpleDataRow; import 
org.apache.ignite.internal.processors.cache.persistence.partstate.GroupPartitionId; import org.apache.ignite.internal.processors.cache.persistence.partstorage.PartitionMetaStorage; import org.apache.ignite.internal.processors.cache.persistence.tree.reuse.ReuseList; import org.apache.ignite.internal.processors.cache.tree.CacheDataTree; import org.apache.ignite.internal.processors.cache.tree.PendingEntriesTree; import org.apache.ignite.internal.processors.cache.tree.mvcc.data.MvccUpdateResult; import org.apache.ignite.internal.processors.cache.tree.mvcc.search.MvccLinkAwareSearchRow; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.processors.query.GridQueryRowCacheCleaner; import org.apache.ignite.internal.util.GridAtomicLong; import org.apache.ignite.internal.util.GridLongList; import org.apache.ignite.internal.util.IgniteTree; import org.apache.ignite.internal.util.lang.GridCloseableIterator; import org.apache.ignite.internal.util.lang.GridCursor; import org.apache.ignite.internal.util.lang.GridIterator; import org.apache.ignite.internal.util.lang.IgniteInClosure2X; import org.apache.ignite.internal.util.lang.IgnitePredicateX; import org.apache.ignite.lang.IgniteBiTuple; import org.jetbrains.annotations.Nullable; /** * */ @SuppressWarnings("WeakerAccess") public interface IgniteCacheOffheapManager { /** * @param ctx Context. * @param grp Cache group. * @throws IgniteCheckedException If failed. */ public void start(GridCacheSharedContext ctx, CacheGroupContext grp) throws IgniteCheckedException; /** * @param cctx Cache context. * @throws IgniteCheckedException If failed. */ public void onCacheStarted(GridCacheContext cctx) throws IgniteCheckedException; /** * */ public void onKernalStop(); /** * @param cacheId Cache ID. * @param destroy Destroy data flag. Setting to <code>true</code> will remove all cache data. 
*/ public void stopCache(int cacheId, boolean destroy); /** * */ public void stop(); /** * Pre-create partitions that resides in page memory or WAL and restores their state. * * @param partRecoveryStates Partition recovery states. * @return Processed partitions: partition id -> processing time in millis. * @throws IgniteCheckedException If failed. */ Map<Integer, Long> restorePartitionStates( Map<GroupPartitionId, Integer> partRecoveryStates ) throws IgniteCheckedException; /** * Partition counter update callback. May be overridden by plugin-provided subclasses. * * @param part Partition. * @param cntr Partition counter. */ public void onPartitionCounterUpdated(int part, long cntr); /** * Initial counter will be updated on state restore only * * @param part Partition * @param start Start. * @param delta Delta. */ public void onPartitionInitialCounterUpdated(int part, long start, long delta); /** * Partition counter provider. May be overridden by plugin-provided subclasses. * * @param part Partition ID. * @return Last updated counter. */ public long lastUpdatedPartitionCounter(int part); /** * @param entry Cache entry. * @return Cached row, if available, null otherwise. * @throws IgniteCheckedException If failed. */ @Nullable public CacheDataRow read(GridCacheMapEntry entry) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @return Cached row, if available, null otherwise. * @throws IgniteCheckedException If failed. */ @Nullable public CacheDataRow read(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException; /** * @param p Partition. * @return Data store. * @throws IgniteCheckedException If failed. */ public CacheDataStore createCacheDataStore(int p) throws IgniteCheckedException; /** * @return Iterable over all existing cache data stores. */ public Iterable<CacheDataStore> cacheDataStores(); /** * @param part Partition. * @return Data store. 
*/ public CacheDataStore dataStore(GridDhtLocalPartition part); /** * @param store Data store. * @throws IgniteCheckedException If failed. */ public void destroyCacheDataStore(CacheDataStore store) throws IgniteCheckedException; /** * TODO: GG-10884, used on only from initialValue. */ public boolean containsKey(GridCacheMapEntry entry); /** * @param cctx Cache context. * @param c Closure. * @param amount Limit of processed entries by single call, {@code -1} for no limit. * @return {@code True} if unprocessed expired entries remains. * @throws IgniteCheckedException If failed. */ public boolean expire(GridCacheContext cctx, IgniteInClosure2X<GridCacheEntryEx, GridCacheVersion> c, int amount) throws IgniteCheckedException; /** * Gets the number of entries pending expire. * * @return Number of pending entries. * @throws IgniteCheckedException If failed to get number of pending entries. */ public long expiredSize() throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param part Partition. * @param c Tree update closure. * @throws IgniteCheckedException If failed. */ public void invoke(GridCacheContext cctx, KeyCacheObject key, GridDhtLocalPartition part, OffheapInvokeClosure c) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param mvccSnapshot MVCC snapshot. * @return Cached row, if available, null otherwise. * @throws IgniteCheckedException If failed. */ @Nullable public CacheDataRow mvccRead(GridCacheContext cctx, KeyCacheObject key, MvccSnapshot mvccSnapshot) throws IgniteCheckedException; /** * For testing only. * * @param cctx Cache context. * @param key Key. * @return All stored versions for given key. * @throws IgniteCheckedException If failed. */ public List<IgniteBiTuple<Object, MvccVersion>> mvccAllVersions(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException; /** * Returns iterator over the all row versions for the given key. * * @param cctx Cache context. 
* @param key Key. * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row. * @return Iterator over all versions. * @throws IgniteCheckedException If failed. */ GridCursor<CacheDataRow> mvccAllVersionsCursor(GridCacheContext cctx, KeyCacheObject key, Object x) throws IgniteCheckedException; /** * @param entry Entry. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param mvccVer MVCC version. * @param newMvccVer New MVCC version. * @return {@code True} if value was inserted. * @throws IgniteCheckedException If failed. */ public boolean mvccInitialValue( GridCacheMapEntry entry, @Nullable CacheObject val, GridCacheVersion ver, long expireTime, MvccVersion mvccVer, MvccVersion newMvccVer ) throws IgniteCheckedException; /** * Tries to apply entry history. * Either applies full entry history or do nothing. * * @param entry Entry to update. * @param hist Full entry history. * @return {@code True} if history applied successfully, {@code False} otherwise. */ boolean mvccApplyHistoryIfAbsent(GridCacheMapEntry entry, List<GridCacheMvccEntryInfo> hist) throws IgniteCheckedException; /** * @param entry Entry. * @param val Value. * @param ver Cache version. * @param expireTime Expire time. * @param mvccSnapshot MVCC snapshot. * @param primary {@code True} if on primary node. * @param needHist Flag to collect history. * @param noCreate Flag indicating that row should not be created if absent. * @param needOldVal {@code True} if need old value. * @param filter Filter. * @param retVal Flag to return previous value. * @param keepBinary Keep binary flag. * @param entryProc Entry processor. * @param invokeArgs Entry processor invoke arguments. * @return Update result. * @throws IgniteCheckedException If failed. 
*/ public MvccUpdateResult mvccUpdate( GridCacheMapEntry entry, CacheObject val, GridCacheVersion ver, long expireTime, MvccSnapshot mvccSnapshot, boolean primary, boolean needHist, boolean noCreate, boolean needOldVal, @Nullable CacheEntryPredicate filter, boolean retVal, boolean keepBinary, EntryProcessor entryProc, Object[] invokeArgs) throws IgniteCheckedException; /** * @param entry Entry. * @param mvccSnapshot MVCC snapshot. * @param primary {@code True} if on primary node. * @param needHist Flag to collect history. * @param needOldVal {@code True} if need old value. * @param filter Filter. * @param retVal Flag to return previous value. * @return Update result. * @throws IgniteCheckedException If failed. */ @Nullable public MvccUpdateResult mvccRemove( GridCacheMapEntry entry, MvccSnapshot mvccSnapshot, boolean primary, boolean needHist, boolean needOldVal, @Nullable CacheEntryPredicate filter, boolean retVal) throws IgniteCheckedException; /** * @param entry Entry. * @param mvccSnapshot MVCC snapshot. * @return Update result. * @throws IgniteCheckedException If failed. */ @Nullable public MvccUpdateResult mvccLock( GridCacheMapEntry entry, MvccSnapshot mvccSnapshot ) throws IgniteCheckedException; /** * Apply update with full history. * Note: History version may be skipped if it have already been actualized with previous update operation. * * @param entry Entry. * @param val Value. * @param ver Version. * @param mvccVer MVCC version. * @param newMvccVer New MVCC version. * @return {@code True} if value was inserted. * @throws IgniteCheckedException If failed. */ public boolean mvccUpdateRowWithPreloadInfo( GridCacheMapEntry entry, @Nullable CacheObject val, GridCacheVersion ver, long expireTime, MvccVersion mvccVer, MvccVersion newMvccVer, byte mvccTxState, byte newMvccTxState ) throws IgniteCheckedException; /** * @param entry Entry. * @throws IgniteCheckedException If failed. 
*/ public void mvccRemoveAll(GridCacheMapEntry entry) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param oldRow Old row if available. * @param part Partition. * @throws IgniteCheckedException If failed. */ public void update( GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, GridDhtLocalPartition part, @Nullable CacheDataRow oldRow ) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param part Partition. * @param mvccVer Mvcc version. * @throws IgniteCheckedException If failed. */ void mvccApplyUpdate( GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, GridDhtLocalPartition part, MvccVersion mvccVer) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param partId Partition number. * @param part Partition. * @throws IgniteCheckedException If failed. */ public void remove( GridCacheContext cctx, KeyCacheObject key, int partId, GridDhtLocalPartition part ) throws IgniteCheckedException; /** * @param ldr Class loader. * @return Number of undeployed entries. */ public int onUndeploy(ClassLoader ldr); /** * @param cacheId Cache ID. * @param primary Primary entries flag. * @param backup Backup entries flag. * @param topVer Topology version. * @param mvccSnapshot MVCC snapshot. * @param dataPageScanEnabled Flag to enable data page scan. * @return Rows iterator. * @throws IgniteCheckedException If failed. */ public GridIterator<CacheDataRow> cacheIterator(int cacheId, boolean primary, boolean backup, AffinityTopologyVersion topVer, @Nullable MvccSnapshot mvccSnapshot, Boolean dataPageScanEnabled ) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param part Partition. * @param mvccSnapshot MVCC snapshot. 
* @param dataPageScanEnabled Flag to enable data page scan. * @return Partition data iterator. * @throws IgniteCheckedException If failed. */ public GridIterator<CacheDataRow> cachePartitionIterator(int cacheId, final int part, @Nullable MvccSnapshot mvccSnapshot, Boolean dataPageScanEnabled) throws IgniteCheckedException; /** * @param part Partition number. * @return Iterator for given partition. * @throws IgniteCheckedException If failed. */ public GridIterator<CacheDataRow> partitionIterator(final int part) throws IgniteCheckedException; /** * @param part Partition number. * @param topVer Topology version. * @return Iterator for given partition that will reserve partition state until it is closed. * @throws IgniteCheckedException If failed. */ public GridCloseableIterator<CacheDataRow> reservedIterator(final int part, final AffinityTopologyVersion topVer) throws IgniteCheckedException; /** * @param parts Partitions. * @return Partition data iterator. * @throws IgniteCheckedException If failed. */ // TODO: MVCC> public IgniteRebalanceIterator rebalanceIterator(IgniteDhtDemandedPartitionsMap parts, AffinityTopologyVersion topVer) throws IgniteCheckedException; /** * @param cctx Cache context. * @param primary {@code True} if need to return primary entries. * @param backup {@code True} if need to return backup entries. * @param topVer Topology version. * @param keepBinary Keep binary flag. * @param mvccSnapshot MVCC snapshot. * @param dataPageScanEnabled Flag to enable data page scan. * @return Entries iterator. * @throws IgniteCheckedException If failed. */ public <K, V> GridCloseableIterator<Cache.Entry<K, V>> cacheEntriesIterator( GridCacheContext cctx, final boolean primary, final boolean backup, final AffinityTopologyVersion topVer, final boolean keepBinary, @Nullable final MvccSnapshot mvccSnapshot, Boolean dataPageScanEnabled ) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param part Partition. * @return Iterator. 
* @throws IgniteCheckedException If failed. */ // TODO: MVCC> public GridCloseableIterator<KeyCacheObject> cacheKeysIterator(int cacheId, final int part) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param primary Primary entries flag. * @param backup Backup entries flag. * @param topVer Topology version. * @return Entries count. * @throws IgniteCheckedException If failed. */ // TODO: MVCC> public long cacheEntriesCount(int cacheId, boolean primary, boolean backup, AffinityTopologyVersion topVer) throws IgniteCheckedException; /** * Store entries. * * @param partId Partition number. * @param infos Entry infos. * @param initPred Applied to all created rows. Each row that not matches the predicate is removed. * @throws IgniteCheckedException If failed. */ public void storeEntries(int partId, Iterator<GridCacheEntryInfo> infos, IgnitePredicateX<CacheDataRow> initPred) throws IgniteCheckedException; /** * Clears offheap entries. * * @param cctx Cache context. * @param readers {@code True} to clear readers. */ public void clearCache(GridCacheContext cctx, boolean readers); /** * @param cacheId Cache ID. * @param part Partition. * @return Number of entries in given partition. */ public long cacheEntriesCount(int cacheId, int part); /** * @return Offheap allocated size. */ public long offHeapAllocatedSize(); /** * @return Global remove ID counter. */ public GridAtomicLong globalRemoveId(); /** * @param cacheId Cache ID. * @param idxName Index name. * @param segment Segment. * @return Root page for index tree. * @throws IgniteCheckedException If failed. */ public RootPage rootPageForIndex(int cacheId, String idxName, int segment) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param idxName Index name. * @throws IgniteCheckedException If failed. */ public @Nullable RootPage findRootPageForIndex(int cacheId, String idxName, int segment) throws IgniteCheckedException; /** * Dropping the root page of the index tree. 
* * @param cacheId Cache ID. * @param idxName Index name. * @param segment Segment index. * @return Dropped root page of the index tree. * @throws IgniteCheckedException If failed. */ @Nullable RootPage dropRootPageForIndex(int cacheId, String idxName, int segment) throws IgniteCheckedException; /** * Renaming the root page of the index tree. * * @param cacheId Cache id. * @param oldIdxName Old name of the index tree. * @param newIdxName New name of the index tree. * @param segment Segment index. * @return Renamed root page of the index tree. * @throws IgniteCheckedException If failed. */ @Nullable RootPage renameRootPageForIndex( int cacheId, String oldIdxName, String newIdxName, int segment ) throws IgniteCheckedException; /** * @param idxName Index name. * @return Reuse list for index tree. * @throws IgniteCheckedException If failed. */ public ReuseList reuseListForIndex(String idxName) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @return Number of entries. */ public long cacheEntriesCount(int cacheId); /** * @param part Partition. * @return Number of entries. */ public long totalPartitionEntriesCount(int part); /** * Preload a partition. Must be called under partition reservation for DHT caches. * * @param part Partition. * @throws IgniteCheckedException If failed. */ public void preloadPartition(int part) throws IgniteCheckedException; /** * */ interface OffheapInvokeClosure extends IgniteTree.InvokeClosure<CacheDataRow> { /** * @return Old row. */ @Nullable public CacheDataRow oldRow(); /** * Flag that indicates if oldRow was expired during invoke. * @return {@code true} if old row was expired, {@code false} otherwise. */ public boolean oldRowExpiredFlag(); } /** * */ interface CacheDataStore { /** * @return Cache data tree object. */ public CacheDataTree tree(); /** * Initialize data store if it exists. * * @return {@code True} if initialized. */ boolean init(); /** * @return Partition ID. */ int partId(); /** * @param cacheId Cache ID. 
* @return Size. */ long cacheSize(int cacheId); /** * @return Cache sizes if store belongs to group containing multiple caches. */ Map<Integer, Long> cacheSizes(); /** * @return Total size. */ long fullSize(); /** * @return {@code True} if there are no items in the store. */ boolean isEmpty(); /** * Updates size metric for particular cache. * * @param cacheId Cache ID. * @param delta Size delta. */ void updateSize(int cacheId, long delta); /** * @return Update counter (LWM). */ long updateCounter(); /** * @return Reserved counter (HWM). */ long reservedCounter(); /** * @return Update counter or {@code null} if store is not yet created. */ @Nullable PartitionUpdateCounter partUpdateCounter(); /** * @param delta Delta. */ long reserve(long delta); /** * @param val Update counter. */ void updateCounter(long val); /** * Updates counters from start value by delta value. * @param start Start. * @param delta Delta. */ boolean updateCounter(long start, long delta); /** * @return Next update counter. */ public long nextUpdateCounter(); /** * Returns current value and updates counter by delta. * * @param delta Delta. * @return Current value. */ public long getAndIncrementUpdateCounter(long delta); /** * @return Initial update counter. */ public long initialUpdateCounter(); /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param oldRow Old row. * @return New row. * @throws IgniteCheckedException If failed. */ CacheDataRow createRow( GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, @Nullable CacheDataRow oldRow) throws IgniteCheckedException; /** * Insert rows into page memory. * * @param rows Rows. * @param initPred Applied to all rows. Each row that not matches the predicate is removed. * @throws IgniteCheckedException If failed. 
*/ public void insertRows(Collection<DataRowCacheAware> rows, IgnitePredicateX<CacheDataRow> initPred) throws IgniteCheckedException; /** * @param cctx Cache context. * @param cleanupRows Rows to cleanup. * @throws IgniteCheckedException If failed. * @return Cleaned rows count. */ public int cleanup(GridCacheContext cctx, @Nullable List<MvccLinkAwareSearchRow> cleanupRows) throws IgniteCheckedException; /** * * @param cctx Cache context. * @param row Row. * @throws IgniteCheckedException */ public void updateTxState(GridCacheContext cctx, CacheSearchRow row) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param oldRow Old row if available. * @throws IgniteCheckedException If failed. */ void update( GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, @Nullable CacheDataRow oldRow) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param mvccVer MVCC version. * @param newMvccVer New MVCC version. * @return {@code True} if new value was inserted. * @throws IgniteCheckedException If failed. */ boolean mvccInitialValue( GridCacheContext cctx, KeyCacheObject key, @Nullable CacheObject val, GridCacheVersion ver, long expireTime, MvccVersion mvccVer, MvccVersion newMvccVer) throws IgniteCheckedException; /** * Tries to apply entry history. * Either applies full entry history or do nothing. * * @param cctx Cache context. * @param key Key. * @param hist Full entry history. * @return {@code True} if entry history applied successfully, {@code False} otherwise. */ boolean mvccApplyHistoryIfAbsent( GridCacheContext cctx, KeyCacheObject key, List<GridCacheMvccEntryInfo> hist) throws IgniteCheckedException; /** * Apply update with full history. * Note: History version may be skipped if it have already been actualized with previous update operation. 
* * @param cctx Grid cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expiration time. * @param mvccVer Mvcc version. * @param newMvccVer New mvcc version. * @return {@code true} on success. * @throws IgniteCheckedException, if failed. */ boolean mvccUpdateRowWithPreloadInfo( GridCacheContext cctx, KeyCacheObject key, @Nullable CacheObject val, GridCacheVersion ver, long expireTime, MvccVersion mvccVer, MvccVersion newMvccVer, byte mvccTxState, byte newMvccTxState) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param mvccSnapshot MVCC snapshot. * @param filter Filter. * @param entryProc Entry processor. * @param invokeArgs Entry processor invoke arguments. * @param primary {@code True} if update is executed on primary node. * @param needHist Flag to collect history. * @param noCreate Flag indicating that row should not be created if absent. * @param needOldVal {@code True} if need old value. * @param retVal Flag to return previous value. * @param keepBinary Keep binary flag. * @return Update result. * @throws IgniteCheckedException If failed. */ MvccUpdateResult mvccUpdate( GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, MvccSnapshot mvccSnapshot, @Nullable CacheEntryPredicate filter, EntryProcessor entryProc, Object[] invokeArgs, boolean primary, boolean needHist, boolean noCreate, boolean needOldVal, boolean retVal, boolean keepBinary) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param mvccSnapshot MVCC snapshot. * @param filter Filter. * @param primary {@code True} if update is executed on primary node. * @param needHistory Flag to collect history. * @param needOldVal {@code True} if need old value. * @param retVal Flag to return previous value. * @return List of transactions to wait for. 
* @throws IgniteCheckedException If failed. */ MvccUpdateResult mvccRemove( GridCacheContext cctx, KeyCacheObject key, MvccSnapshot mvccSnapshot, @Nullable CacheEntryPredicate filter, boolean primary, boolean needHistory, boolean needOldVal, boolean retVal) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param mvccSnapshot MVCC snapshot. * @return List of transactions to wait for. * @throws IgniteCheckedException If failed. */ MvccUpdateResult mvccLock( GridCacheContext cctx, KeyCacheObject key, MvccSnapshot mvccSnapshot) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @throws IgniteCheckedException If failed. */ void mvccRemoveAll(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param c Closure. * @throws IgniteCheckedException If failed. */ public void invoke(GridCacheContext cctx, KeyCacheObject key, OffheapInvokeClosure c) throws IgniteCheckedException; /** * * @param cctx Cache context. * @param key Key. * @param val Value. * @param ver Version. * @param expireTime Expire time. * @param mvccVer Mvcc version. * @throws IgniteCheckedException */ void mvccApplyUpdate(GridCacheContext cctx, KeyCacheObject key, CacheObject val, GridCacheVersion ver, long expireTime, MvccVersion mvccVer ) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @param partId Partition number. * @throws IgniteCheckedException If failed. */ public void remove(GridCacheContext cctx, KeyCacheObject key, int partId) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @return Data row. * @throws IgniteCheckedException If failed. */ public CacheDataRow find(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException; /** * Returns iterator over the all row versions for the given key. * * @param cctx Cache context. * @param key Key. 
* @param x Implementation specific argument, {@code null} always means that we need to return full detached data row. * @return Iterator over all versions. * @throws IgniteCheckedException If failed. */ GridCursor<CacheDataRow> mvccAllVersionsCursor(GridCacheContext cctx, KeyCacheObject key, Object x) throws IgniteCheckedException; /** * @param cctx Cache context. * @param key Key. * @return Data row. * @throws IgniteCheckedException If failed. */ public CacheDataRow mvccFind(GridCacheContext cctx, KeyCacheObject key, MvccSnapshot snapshot) throws IgniteCheckedException; /** * For testing only. * * @param cctx Cache context. * @param key Key. * @return All stored versions for given key. * @throws IgniteCheckedException If failed. */ List<IgniteBiTuple<Object, MvccVersion>> mvccFindAllVersions(GridCacheContext cctx, KeyCacheObject key) throws IgniteCheckedException; /** * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor() throws IgniteCheckedException; /** * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(Object x) throws IgniteCheckedException; /** * @param mvccSnapshot MVCC snapshot. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(MvccSnapshot mvccSnapshot) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(int cacheId) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param mvccSnapshot Mvcc snapshot. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? 
extends CacheDataRow> cursor(int cacheId, MvccSnapshot mvccSnapshot) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param lower Lower bound. * @param upper Upper bound. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower, KeyCacheObject upper) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param lower Lower bound. * @param upper Upper bound. * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower, KeyCacheObject upper, Object x) throws IgniteCheckedException; /** * @param cacheId Cache ID. * @param lower Lower bound. * @param upper Upper bound. * @param x Implementation specific argument, {@code null} always means that we need to return full detached data row. * @param snapshot Mvcc snapshot. * @return Data cursor. * @throws IgniteCheckedException If failed. */ public GridCursor<? extends CacheDataRow> cursor(int cacheId, KeyCacheObject lower, KeyCacheObject upper, Object x, MvccSnapshot snapshot) throws IgniteCheckedException; /** * Destroys the tree associated with the store. * * @throws IgniteCheckedException If failed. */ public void destroy() throws IgniteCheckedException; /** * Mark store as destroyed. */ public void markDestroyed() throws IgniteCheckedException; /** * Clears all the records associated with logical cache with given ID. * * @param cacheId Cache ID. * @throws IgniteCheckedException If failed. */ public void clear(int cacheId) throws IgniteCheckedException; /** * @return Row store. */ public RowStore rowStore(); /** * @param start Counter. * @param delta Delta. */ public void updateInitialCounter(long start, long delta); /** * Inject rows cache cleaner. * * @param rowCacheCleaner Rows cache cleaner. 
*/ public void setRowCacheCleaner(GridQueryRowCacheCleaner rowCacheCleaner); /** * Return PendingTree for data store. * * @return PendingTree instance. */ public PendingEntriesTree pendingTree(); /** * Flushes pending update counters closing all possible gaps. * * @return Even-length array of pairs [start, end] for each gap. */ GridLongList finalizeUpdateCounters(); /** * Preload a store into page memory. * @throws IgniteCheckedException If failed. */ public void preload() throws IgniteCheckedException; /** * Reset counter for partition. */ void resetUpdateCounter(); /** * Reset the initial value of the partition counter. */ void resetInitialUpdateCounter(); /** * Partition storage. */ public PartitionMetaStorage<SimpleDataRow> partStorage(); } }
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* @test * @bug 8003992 8027155 * @summary Test a file whose path name is embedded with NUL character, and * ensure it is handled correctly. 
* @author Dan Xu */ import java.io.File; import java.io.FileFilter; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.RandomAccessFile; import java.io.FileNotFoundException; import java.io.FilenameFilter; import java.io.IOException; import java.net.MalformedURLException; import java.nio.file.InvalidPathException; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.ObjectOutputStream; import java.io.ObjectInputStream; public class NulFile { private static final char CHAR_NUL = '\u0000'; private static final String ExceptionMsg = "Invalid file path"; public static void main(String[] args) { testFile(); testFileInUnix(); testFileInWindows(); testTempFile(); } private static void testFile() { test(new File(new StringBuilder().append(CHAR_NUL).toString())); test(new File( new StringBuilder().append("").append(CHAR_NUL).toString())); test(new File( new StringBuilder().append(CHAR_NUL).append("").toString())); } private static void testFileInUnix() { String osName = System.getProperty("os.name"); if (osName.startsWith("Windows")) return; String unixFile = "/"; test(unixFile); unixFile = "//"; test(unixFile); unixFile = "data/info"; test(unixFile); unixFile = "/data/info"; test(unixFile); unixFile = "//data//info"; test(unixFile); } private static void testFileInWindows() { String osName = System.getProperty("os.name"); if (!osName.startsWith("Windows")) return; String windowsFile = "\\"; test(windowsFile); windowsFile = "\\\\"; test(windowsFile); windowsFile = "/"; test(windowsFile); windowsFile = "//"; test(windowsFile); windowsFile = "/\\"; test(windowsFile); windowsFile = "\\/"; test(windowsFile); windowsFile = "data\\info"; test(windowsFile); windowsFile = "\\data\\info"; test(windowsFile); windowsFile = "\\\\server\\data\\info"; test(windowsFile); windowsFile = "z:data\\info"; test(windowsFile); windowsFile = "z:\\data\\info"; test(windowsFile); } private static void test(final String name) { int 
length = name.length(); for (int i = 0; i <= length; i++) { StringBuilder sbName = new StringBuilder(name); sbName.insert(i, CHAR_NUL); String curName = sbName.toString(); // test File(String parent, String child) File testFile = new File(curName, "child"); test(testFile); testFile = new File("parent", curName); test(testFile); // test File(String pathname) testFile = new File(curName); test(testFile); // test File(File parent, String child) testFile = new File(new File(curName), "child"); test(testFile); testFile = new File(new File("parent"), curName); test(testFile); // test FileInputStream testFileInputStream(curName); // test FileOutputStream testFileOutputStream(curName); // test RandomAccessFile testRandomAccessFile(curName); } } private static void testFileInputStream(final String str) { boolean exceptionThrown = false; FileInputStream is = null; try { is = new FileInputStream(str); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("FileInputStream constructor" + " should throw FileNotFoundException"); } if (is != null) { throw new RuntimeException("FileInputStream constructor" + " should fail"); } exceptionThrown = false; is = null; try { is = new FileInputStream(new File(str)); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("FileInputStream constructor" + " should throw FileNotFoundException"); } if (is != null) { throw new RuntimeException("FileInputStream constructor" + " should fail"); } } private static void testFileOutputStream(final String str) { boolean exceptionThrown = false; FileOutputStream os = null; try { os = new FileOutputStream(str); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("FileOutputStream constructor" + " should 
throw FileNotFoundException"); } if (os != null) { throw new RuntimeException("FileOutputStream constructor" + " should fail"); } exceptionThrown = false; os = null; try { os = new FileOutputStream(new File(str)); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("FileOutputStream constructor" + " should throw FileNotFoundException"); } if (os != null) { throw new RuntimeException("FileOutputStream constructor" + " should fail"); } } private static void testRandomAccessFile(final String str) { boolean exceptionThrown = false; RandomAccessFile raf = null; String[] modes = {"r", "rw", "rws", "rwd"}; for (String mode : modes) { try { raf = new RandomAccessFile(str, mode); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("RandomAccessFile constructor" + " should throw FileNotFoundException"); } if (raf != null) { throw new RuntimeException("RandomAccessFile constructor" + " should fail"); } exceptionThrown = false; raf = null; try { raf = new RandomAccessFile(new File(str), mode); } catch (FileNotFoundException ex) { if (ExceptionMsg.equals(ex.getMessage())) exceptionThrown = true; } if (!exceptionThrown) { throw new RuntimeException("RandomAccessFile constructor" + " should throw FileNotFoundException"); } if (raf != null) { throw new RuntimeException("RandomAccessFile constructor" + " should fail"); } } } private static void test(File testFile) { test(testFile, false); // test serialization testSerialization(testFile); } @SuppressWarnings("deprecation") private static void test(File testFile, boolean derived) { boolean exceptionThrown = false; if (testFile == null) { throw new RuntimeException("test file should not be null."); } // getPath() if (testFile.getPath().indexOf(CHAR_NUL) < 0) { throw new RuntimeException( "File path should contain Nul character"); 
}
        // getAbsolutePath(): the embedded NUL must survive conversion to an
        // absolute path.
        if (testFile.getAbsolutePath().indexOf(CHAR_NUL) < 0) {
            throw new RuntimeException(
                "File absolute path should contain Nul character");
        }
        // getAbsoluteFile(): a derived File keeps the NUL; a non-derived file
        // is re-tested once with derived == true (prevents infinite recursion).
        File derivedAbsFile = testFile.getAbsoluteFile();
        if (derived) {
            if (derivedAbsFile.getPath().indexOf(CHAR_NUL) < 0) {
                throw new RuntimeException(
                    "Derived file path should also contain Nul character");
            }
        } else {
            test(derivedAbsFile, true);
        }
        // getCanonicalPath(): must reject the invalid path with the expected
        // exception message.
        try {
            exceptionThrown = false;
            testFile.getCanonicalPath();
        } catch (IOException ex) {
            if (ExceptionMsg.equals(ex.getMessage()))
                exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException(
                "getCanonicalPath() should throw IOException with"
                + " message \"" + ExceptionMsg + "\"");
        }
        // getCanonicalFile()
        try {
            exceptionThrown = false;
            testFile.getCanonicalFile();
        } catch (IOException ex) {
            if (ExceptionMsg.equals(ex.getMessage()))
                exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException(
                "getCanonicalFile() should throw IOException with"
                + " message \"" + ExceptionMsg + "\"");
        }
        // toURL()
        try {
            exceptionThrown = false;
            testFile.toURL();
        } catch (MalformedURLException ex) {
            if (ExceptionMsg.equals(ex.getMessage()))
                exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException("toURL() should throw IOException with"
                + " message \"" + ExceptionMsg + "\"");
        }
        // Query methods on a path with an embedded NUL must behave as if the
        // file does not exist at all.
        // canRead()
        if (testFile.canRead())
            throw new RuntimeException("File should not be readable");
        // canWrite()
        if (testFile.canWrite())
            throw new RuntimeException("File should not be writable");
        // exists()
        if (testFile.exists())
            throw new RuntimeException("File should not be existed");
        // isDirectory()
        if (testFile.isDirectory())
            throw new RuntimeException("File should not be a directory");
        // isFile()
        if (testFile.isFile())
            throw new RuntimeException("File should not be a file");
        // isHidden()
        if (testFile.isHidden())
            throw new RuntimeException("File should not be hidden");
        // lastModified()
        if (testFile.lastModified() != 0L)
            throw new RuntimeException("File last modified time should be 0L");
        // length()
        if (testFile.length() != 0L)
            throw new RuntimeException("File length should be 0L");
        // createNewFile(): mutating operations must fail rather than touch
        // the file system.
        try {
            exceptionThrown = false;
            testFile.createNewFile();
        } catch (IOException ex) {
            if (ExceptionMsg.equals(ex.getMessage()))
                exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException(
                "createNewFile() should throw IOException with"
                + " message \"" + ExceptionMsg + "\"");
        }
        // delete()
        if (testFile.delete())
            throw new RuntimeException("Delete operation should fail");
        // list()
        if (testFile.list() != null)
            throw new RuntimeException("File list() should return null");
        // list(FilenameFilter): the filter is never consulted because the
        // listing itself must fail (return null).
        FilenameFilter fnFilter = new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return false;
            }
        };
        if (testFile.list(fnFilter) != null) {
            throw new RuntimeException("File list(FilenameFilter) should"
                + " return null");
        }
        // listFiles()
        if (testFile.listFiles() != null)
            throw new RuntimeException("File listFiles() should return null");
        // listFiles(FilenameFilter)
        if (testFile.listFiles(fnFilter) != null) {
            throw new RuntimeException("File listFiles(FilenameFilter)"
                + " should return null");
        }
        // listFiles(FileFilter)
        FileFilter fFilter = new FileFilter() {
            @Override
            public boolean accept(File file) {
                return false;
            }
        };
        if (testFile.listFiles(fFilter) != null) {
            throw new RuntimeException("File listFiles(FileFilter)"
                + " should return null");
        }
        // mkdir()
        if (testFile.mkdir()) {
            throw new RuntimeException("File should not be able to"
                + " create directory");
        }
        // mkdirs()
        if (testFile.mkdirs()) {
            throw new RuntimeException("File should not be able to"
                + " create directories");
        }
        // renameTo(File): rename must fail in both directions, and a null
        // destination must raise NPE.
        if (testFile.renameTo(new File("dest")))
            throw new RuntimeException("File rename should fail");
        if (new File("dest").renameTo(testFile))
            throw new RuntimeException("File rename should fail");
        try {
            exceptionThrown = false;
            testFile.renameTo(null);
        } catch (NullPointerException ex) {
            exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException("File rename should thrown NPE");
        }
        // setLastModified(long): setting any time must fail; a negative time
        // must raise IllegalArgumentException with the documented message.
        if (testFile.setLastModified(0L)) {
            throw new RuntimeException("File should fail to set"
                + " last modified time");
        }
        try {
            exceptionThrown = false;
            testFile.setLastModified(-1);
        } catch (IllegalArgumentException ex) {
            if ("Negative time".equals(ex.getMessage()))
                exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException("File should fail to set"
                + " last modified time with message \"Negative time\"");
        }
        // setReadOnly()
        if (testFile.setReadOnly())
            throw new RuntimeException("File should fail to set read-only");
        // setWritable(boolean writable, boolean ownerOnly): every flag
        // combination must fail.
        if (testFile.setWritable(true, true))
            throw new RuntimeException("File should fail to set writable");
        if (testFile.setWritable(true, false))
            throw new RuntimeException("File should fail to set writable");
        if (testFile.setWritable(false, true))
            throw new RuntimeException("File should fail to set writable");
        if (testFile.setWritable(false, false))
            throw new RuntimeException("File should fail to set writable");
        // setWritable(boolean writable)
        if (testFile.setWritable(false))
            throw new RuntimeException("File should fail to set writable");
        if (testFile.setWritable(true))
            throw new RuntimeException("File should fail to set writable");
        // setReadable(boolean readable, boolean ownerOnly)
        if (testFile.setReadable(true, true))
            throw new RuntimeException("File should fail to set readable");
        if (testFile.setReadable(true, false))
            throw new RuntimeException("File should fail to set readable");
        if (testFile.setReadable(false, true))
            throw new RuntimeException("File should fail to set readable");
        if (testFile.setReadable(false, false))
            throw new RuntimeException("File should fail to set readable");
        // setReadable(boolean readable)
        if (testFile.setReadable(false))
            throw new RuntimeException("File should fail to set readable");
        if (testFile.setReadable(true))
            throw new RuntimeException("File should fail to set readable");
        // setExecutable(boolean executable, boolean ownerOnly)
        if (testFile.setExecutable(true, true))
            throw new RuntimeException("File should fail to set executable");
        if (testFile.setExecutable(true, false))
            throw new RuntimeException("File should fail to set executable");
        if (testFile.setExecutable(false, true))
            throw new RuntimeException("File should fail to set executable");
        if (testFile.setExecutable(false, false))
            throw new RuntimeException("File should fail to set executable");
        // setExecutable(boolean executable)
        if (testFile.setExecutable(false))
            throw new RuntimeException("File should fail to set executable");
        if (testFile.setExecutable(true))
            throw new RuntimeException("File should fail to set executable");
        // canExecute()
        if (testFile.canExecute())
            throw new RuntimeException("File should not be executable");
        // Disk-space queries for a non-existent path must all report zero.
        // getTotalSpace()
        if (testFile.getTotalSpace() != 0L)
            throw new RuntimeException("The total space should be 0L");
        // getFreeSpace()
        if (testFile.getFreeSpace() != 0L)
            throw new RuntimeException("The free space should be 0L");
        // getUsableSpace()
        if (testFile.getUsableSpace() != 0L)
            throw new RuntimeException("The usable space should be 0L");
        // compareTo(File null)
        try {
            exceptionThrown = false;
            testFile.compareTo(null);
        } catch (NullPointerException ex) {
            exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException("compareTo(null) should throw NPE");
        }
        // toString(): the NUL must still be visible in the printable path.
        if (testFile.toString().indexOf(CHAR_NUL) < 0) {
            throw new RuntimeException(
                "File path should contain Nul character");
        }
        // toPath(): conversion to java.nio.file.Path must reject the NUL.
        try {
            exceptionThrown = false;
            testFile.toPath();
        } catch (InvalidPathException ex) {
            exceptionThrown = true;
        }
        if (!exceptionThrown) {
            throw new RuntimeException("toPath() should throw"
                + " InvalidPathException");
        }
    }

    /**
     * Round-trips {@code testFile} through Java serialization, verifies the
     * deserialized copy keeps the identical (NUL-containing) path, and then
     * re-runs the full behavioural test on the copy.
     */
    private static void testSerialization(File testFile) {
        String path = testFile.getPath();
        try {
            // serialize test file
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ObjectOutputStream oos = new ObjectOutputStream(baos);
            oos.writeObject(testFile);
            oos.close();
            // deserialize test file
            byte[] bytes = baos.toByteArray();
            ByteArrayInputStream is = new ByteArrayInputStream(bytes);
            ObjectInputStream ois = new ObjectInputStream(is);
            File newFile = (File) ois.readObject();
            // test
            String newPath = newFile.getPath();
            if (!path.equals(newPath)) {
                throw new RuntimeException(
                    "Serialization should not change file path");
            }
            test(newFile, false);
        } catch (IOException | ClassNotFoundException ex) {
            // Serialization plumbing failures are reported but do not abort
            // the overall test run.
            System.err.println("Exception happens in testSerialization");
            System.err.println(ex.getMessage());
        }
    }

    /**
     * Exercises {@code File.createTempFile} with a NUL character inserted at
     * every possible position of the prefix, the suffix and the directory
     * name, for several name lengths.
     */
    private static void testTempFile() {
        final String[] names = {"x", "xx", "xxx", "xxxx"};
        final String shortPrefix = "sp";
        final String prefix = "prefix";
        final String suffix = "suffix";
        File tmpDir = new File("tmpDir");
        for (String name : names) {
            int length = name.length();
            // Insert the NUL at every index, including 0 and name.length().
            for (int i = 0; i <= length; i++) {
                StringBuilder sbName = new StringBuilder(name);
                sbName.insert(i, CHAR_NUL);
                String curName = sbName.toString();
                // test prefix
                testCreateTempFile(curName, suffix, tmpDir);
                // test suffix
                testCreateTempFile(shortPrefix, curName, tmpDir);
                testCreateTempFile(prefix, curName, tmpDir);
                // test directory
                testCreateTempFile(shortPrefix, suffix, new File(curName));
                testCreateTempFile(prefix, suffix, new File(curName));
            }
        }
    }

    /**
     * A prefix shorter than three characters must be rejected with
     * {@code IllegalArgumentException("Prefix string too short")}; otherwise
     * the embedded NUL in one of the arguments must make temp-file creation
     * fail with an {@code IOException} whose message starts with
     * "Unable to create temporary file".
     */
    private static void testCreateTempFile(String prefix, String suffix,
            File directory) {
        // createTempFile(String prefix, String suffix, File directory)
        boolean exceptionThrown = false;
        boolean shortPrefix = (prefix.length() < 3);
        if (shortPrefix) {
            try {
                File.createTempFile(prefix, suffix, directory);
            } catch (IllegalArgumentException ex) {
                if ("Prefix string too short".equals(ex.getMessage()))
                    exceptionThrown = true;
            } catch (IOException ioe) {
                // Unexpected here; report but keep going.
                System.err.println("IOException happens in testCreateTempFile");
                System.err.println(ioe.getMessage());
            }
        } else {
            try {
                File.createTempFile(prefix, suffix, directory);
            } catch (IOException ex) {
                String err = "Unable to create temporary file";
                if (ex.getMessage() != null && ex.getMessage().startsWith(err))
                    exceptionThrown = true;
                else {
                    throw new RuntimeException("Get IOException with message, "
                        + ex.getMessage() + ", expect message, " + err);
                }
            }
        }
        if (!exceptionThrown) {
            throw new RuntimeException("createTempFile() should throw"
                + (shortPrefix ? " IllegalArgumentException" : " IOException"));
        }
    }
}
/**
 * PROJECT: Robotics sample code.
 * Module: Stress test for the "Java Generator" code from https://github.com/domlachowicz/java-generators.
 */
package com.zoominfo.util.yieldreturn;

import org.junit.Test;

import java.util.Iterator;
import java.util.function.IntSupplier;
import java.util.function.IntUnaryOperator;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Stress test for the {@code Generator} yield-return implementation.
 *
 * <p>It runs several generators "in parallel" (strictly interleaved from the
 * client's point of view), verifies the exact sequence of yielded values, and
 * uses an {@link EntryTracker} to assert that client code and generator code
 * never execute concurrently.
 */
public class GeneratorStressTest {

    Logger log = new Logger();

    /**
     * A simple logger class that the rest of the code uses to log stuff and also for assertion failures.
     * This can be modified/replaced to adapt to other (including headless) environments.
     */
    class Logger {
        void logError(String s) {
            System.out.println("ERR, " + s);
        }

        void logWarning(String s) {
            System.out.println("WARN, " + s);
        }

        void logInfo(String s) {
            System.out.println("INFO, " + s);
        }

        // Prints a diagnostic line before triggering the (assertion-enabled)
        // JVM assert, so failures are visible even with -da.
        void logAssert(boolean b, String s) {
            if (!b) {
                System.out.println("ASSERTFAIL, " + s);
            }
            assert(b);
        }
    }

    /**
     * Passively tracks that only one person is "in the room" at a time...
     * (i.e. that generator and client code are strictly interleaved, never
     * concurrent). Uses compareAndSet so a violation is detected atomically.
     */
    class EntryTracker {
        private AtomicBoolean isEntered = new AtomicBoolean(false);

        void enter() {
            boolean result = isEntered.compareAndSet(false, true);
            log.logAssert(result, "Entering a room with someone in it");
        }

        void exit() {
            boolean result = isEntered.compareAndSet(true, false);
            log.logAssert(result, "Leaving a room with no one in it");
        }
    }

    /**
     * Wraps a generator that returns an "int supplier", i.e., a
     * function with no arguments that returns an int.
     * Internally, it cycles through a set of sequence generators (intFuncs).
     * It also uses an "entry tracker" to verify that client and iterator code are strictly interleaved.
     */
    class IntFuncGenerator extends Generator<IntSupplier> {

        final String name;
        int yieldCount;
        IntUnaryOperator[] intFuncs;
        long busyWorkCalls;
        int busyWorkDepth;
        Iterator<IntSupplier> myIterator;
        EntryTracker tracker;

        /**
         * Constructs a generator that yields exactly yieldCount times. The lambda returned in the ith iteration
         * is intFuncs[i%intFuncs.length].applyAsInt(i).
         * @param name Name of this generator. Used for debugging/logging.
         * @param intFuncs Array of int functions that determine successive lambdas
         * @param tracker Entry tracker shared with the client to verify strict interleaving
         * @param yieldCount Exact number of iterations for this generator
         * @param busyWorkCalls Count of extra "busy work" calls to make before yielding
         * @param busyWorkDepth Recursion depth when performing "busy work"
         */
        public IntFuncGenerator(String name, IntUnaryOperator[] intFuncs, EntryTracker tracker,
                int yieldCount, long busyWorkCalls, int busyWorkDepth) {
            this.name = name;
            this.yieldCount = yieldCount;
            this.intFuncs = intFuncs;
            this.tracker = tracker;
            this.busyWorkCalls = busyWorkCalls;
            this.busyWorkDepth = busyWorkDepth;
            this.myIterator = null;
        }

        @Override
        protected void run() {
            // NOTE: the tracker.exit()/yield()/tracker.enter() ordering below
            // is the heart of the interleaving check — the generator leaves
            // "the room" just before handing control to the client and
            // re-enters immediately after being resumed.
            tracker.enter(); // "Enter the room" (only one thread can be "in the room").
            //log.logInfo("INTGEN, name:" + this.name + ", STARTING");
            int count = yieldCount;
            while (count > 0) {
                // Keep cycling through the intFuncs. The lambda generated at the ith
                // iteration of this generator is intFuncs[i % intFuncs.length].applyAsInt(i).
                // The *client* code calling these generators knows this and verifies this sequence.
                for (IntUnaryOperator func : intFuncs) {
                    if (count <= 0) {
                        break;
                    }
                    int index = yieldCount - count; // 0-based index of the iterator
                    count--;
                    busyWork(busyWorkCalls, busyWorkDepth);
                    final int param = index;
                    // Give other threads a chance to run every now and then.
                    // This is purely to attempt to precipitate latent threading bugs.
                    //(see also this code in the runLambdaTests method.)
                    if (count % 3 == 0) {
                        Thread.yield();
                    }
                    tracker.exit();
                    yield(() -> func.applyAsInt(param));
                    tracker.enter();
                }
            }
            // A final exit...
            //log.logInfo("INTGEN, name:" + this.name + ", ENDING");
            tracker.exit();
        }

        /**
         * For testing purposes, we maintain an on-demand single iterator.
         * @return Return the "default" iterator for this instance
         */
        public Iterator<IntSupplier> defaultIterator() {
            if (myIterator == null) {
                myIterator = this.iterator();
            }
            return myIterator;
        }
    }

    /**
     * Do meaningless "busy work", recursing if necessary
     * @param calls Minimum number of calls to make
     * @param depth Call depth
     */
    public void busyWork(final long calls, final int depth) {
        if (depth <= 0) {
            log.logAssert(calls == 0, "busyWork called with 0 depth and nonzero calls");
            return; // ***EARLY RETURN***
        }
        long callsLeft = Math.max(1, calls-depth);
        while (callsLeft > 0) {
            // Recurse with a smaller budget; the exact work done is
            // irrelevant — only that time is burned on the generator thread.
            busyWork(depth-1, depth-1);
            callsLeft -= depth;
        }
    }

    /**
     * Create multiple generators and iterate over them "in parallel".
     * @param nGenerators Count of generators
     * @param yieldCount Total number of yields (for all generators)
     * @param busyWorkCalls Number of calls when doing "busy work" before each yield
     * @param busyWorkDepth Recursion depth when doing "busy work" before each yield
     */
    public void runLambdaTest(int nGenerators, final int yieldCount, final long busyWorkCalls,
            final int busyWorkDepth ) {
        IntFuncGenerator[] generators = new IntFuncGenerator[nGenerators];
        EntryTracker tracker = new EntryTracker(); // Keeps track of thread reentrancy.

        // Create some random unary int functions ...
        // The nth function returns n*BASE+ i. It could be defined using a loop, but this way the
        // structure is more clear.
        // The generators will keep cycling through these functions to
        // generate the lambda that is returned as the next yield value. The client code is expecting this and
        // again verifies this sequence. If the Generator code returns values in the wrong sequence
        // it will be caught (with high probability) by the client code. One could have more such functions and also
        // do more to ensure that generated values are unique across functions but that is overkill.
        final int BASE = 10000;
        IntUnaryOperator[] intFuncs = new IntUnaryOperator[] {
            (i) -> BASE + i,
            (i) -> 2*BASE + i,
            (i) -> 3*BASE + i,
            (i) -> 4*BASE + i,
            (i) -> 5*BASE + i,
            (i) -> 6*BASE + i,
            (i) -> 7*BASE + i,
            (i) -> 8*BASE + i,
            (i) -> 9*BASE + i,
            (i) -> 10*BASE + i
        };

        tracker.enter(); //Enter the room - only one thead can be "in the room" at a time.

        // Initialize the generators; They are all identical (but) different instances.
        for (int i = 0; i < nGenerators; i++) {
            generators[i] = new IntFuncGenerator("G"+i, intFuncs, tracker, yieldCount,
                busyWorkCalls, busyWorkDepth);
        }

        // Run through the generators "in parallel"
        for (int i=0; i<yieldCount; i++) {
            for (IntFuncGenerator gen: generators) {
                Iterator<IntSupplier> iter = gen.defaultIterator();
                // Occasionally yield...
                // (We could make this random but it is arguably more
                // repeatable this way...)
                if (i % 3 == 0) {
                    Thread.yield();
                }
                tracker.exit(); // "Exit the room" - the iterator logic will run now.
                IntSupplier func = iter.next();
                tracker.enter(); // Get back "in the room"
                int actual = func.getAsInt();
                int expected = intFuncs[i % intFuncs.length].applyAsInt(i); // Pick the nth function, which is (i modulo number of functions)
                log.logAssert(expected == actual,
                    "runLambdaTest, generator:" + gen.name + ", iteration:" + i
                    + " expected:" + expected + ", actual:" + actual);
            }
        }

        // We expect ALL the iterators to be done now...
        for (IntFuncGenerator gen: generators) {
            Iterator<IntSupplier> iter = gen.defaultIterator();
            tracker.exit(); // "Exit the room" - the iterator logic will run now (yes, even for hasNext())
            boolean actual = iter.hasNext();
            tracker.enter(); // Get back "in the room"
            boolean expected = false;
            log.logAssert(expected == actual,
                "runLambda, generator:" + gen.name + ", Unexpected items left");
        }
    }

    /**
     * Goes through stages, running more and more intensive versions of runLambdaTests.
     */
    @Test
    public void testMultiple() {
        // Create multiple iterators and iterate over them "in parallel".
        log.logInfo("testMultiple, ----TESTING BEGINS----");
        final int STAGES = 20;
        for (int i=0; i<= STAGES; i++) {
            // Each stage scales generator count, yield count and busy-work
            // parameters up together (stage 0 is a degenerate no-op stage).
            int busyWorkDepth = Math.max(1, 10*i);
            int yieldCount = 100*i;
            int nGenerators = 2*i; //Math.min(i, 1);
            long multFactor = Math.max(1, (long) nGenerators * yieldCount);
            long busyWorkCalls = Math.max(2*busyWorkDepth, (long) (Math.pow(2, i)/multFactor));
            log.logInfo("testMultiple, STAGE START, stage:" + i + ", nGens:" + nGenerators
                + ", yieldCount:" + yieldCount +", bwCalls:" + busyWorkCalls
                + ", bwDepth:" + busyWorkDepth);
            runLambdaTest(nGenerators, yieldCount, busyWorkCalls, busyWorkDepth);
            log.logInfo(" testMultiple, STAGE END, stage:" + i);
        }
        log.logInfo("testMultiple, ----TESTING COMPLETES----");
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.azure.servicebus;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

import com.azure.messaging.servicebus.ServiceBusSenderAsyncClient;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.component.azure.servicebus.client.ServiceBusClientFactory;
import org.apache.camel.component.azure.servicebus.client.ServiceBusSenderAsyncClientWrapper;
import org.apache.camel.component.azure.servicebus.operations.ServiceBusSenderOperations;
import org.apache.camel.support.DefaultAsyncProducer;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Mono;

/**
 * Asynchronous Camel producer that sends or schedules messages on an Azure
 * Service Bus queue/topic via a reactive sender client. The operation to run
 * per exchange (send vs. schedule) is resolved from the endpoint
 * configuration, defaulting to send.
 */
public class ServiceBusProducer extends DefaultAsyncProducer {

    private static final Logger LOG = LoggerFactory.getLogger(ServiceBusProducer.class);

    private ServiceBusSenderAsyncClientWrapper senderClientWrapper;
    private ServiceBusConfigurationOptionsProxy configurationOptionsProxy;
    private ServiceBusSenderOperations serviceBusSenderOperations;

    // Dispatch table: operation definition -> handler. Populated once in the
    // instance initializer below; handlers capture no per-exchange state.
    private final Map<ServiceBusProducerOperationDefinition, BiConsumer<Exchange, AsyncCallback>> operationsToExecute
            = new HashMap<>();

    {
        bind(ServiceBusProducerOperationDefinition.sendMessages, sendMessages());
        bind(ServiceBusProducerOperationDefinition.scheduleMessages, scheduleMessages());
    }

    public ServiceBusProducer(final Endpoint endpoint) {
        super(endpoint);
    }

    @Override
    protected void doInit() throws Exception {
        super.doInit();
        configurationOptionsProxy = new ServiceBusConfigurationOptionsProxy(getConfiguration());
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();

        // create the senderClient: reuse a user-supplied client if one was
        // configured, otherwise build one from the endpoint configuration.
        final ServiceBusSenderAsyncClient senderClient = getConfiguration().getSenderAsyncClient() != null
                ? getConfiguration().getSenderAsyncClient()
                : ServiceBusClientFactory.createServiceBusSenderAsyncClient(getConfiguration());

        // create the wrapper
        senderClientWrapper = new ServiceBusSenderAsyncClientWrapper(senderClient);

        // create the operations
        serviceBusSenderOperations = new ServiceBusSenderOperations(senderClientWrapper);
    }

    /**
     * Per the Camel async-producer contract: returns false when processing
     * continues asynchronously (the Mono subscription will invoke the
     * callback later), and true when it completed synchronously (here: only
     * on an immediate failure).
     */
    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        try {
            invokeOperation(configurationOptionsProxy.getServiceBusProducerOperationDefinition(exchange), exchange,
                    callback);
            return false;
        } catch (Exception e) {
            exchange.setException(e);
            callback.done(true);
            return true;
        }
    }

    @Override
    protected void doStop() throws Exception {
        if (senderClientWrapper != null) {
            // shutdown async client
            senderClientWrapper.close();
        }

        super.doStop();
    }

    @Override
    public ServiceBusEndpoint getEndpoint() {
        return (ServiceBusEndpoint) super.getEndpoint();
    }

    public ServiceBusConfiguration getConfiguration() {
        return getEndpoint().getConfiguration();
    }

    // Registers a handler for the given operation in the dispatch table.
    private void bind(ServiceBusProducerOperationDefinition operation, BiConsumer<Exchange, AsyncCallback> fn) {
        operationsToExecute.put(operation, fn);
    }

    /**
     * Entry method that selects the appropriate ServiceBusProducerOperationDefinition operation and executes it
     */
    private void invokeOperation(
            final ServiceBusProducerOperationDefinition operation, final Exchange exchange,
            final AsyncCallback callback) {
        final ServiceBusProducerOperationDefinition operationsToInvoke;

        // we put sendMessage operation as default in case no operation has been selected
        if (ObjectHelper.isEmpty(operation)) {
            operationsToInvoke = ServiceBusProducerOperationDefinition.sendMessages;
        } else {
            operationsToInvoke = operation;
        }

        final BiConsumer<Exchange, AsyncCallback> fnToInvoke = operationsToExecute.get(operationsToInvoke);

        if (fnToInvoke != null) {
            fnToInvoke.accept(exchange, callback);
        } else {
            throw new RuntimeCamelException("Operation not supported. Value: " + operationsToInvoke);
        }
    }

    /**
     * Handler for the sendMessages operation. An Iterable body is sent as a
     * batch (each element converted to String); any other body is converted
     * to a single String message. The send itself is fire-and-forget: the
     * result callback sets nothing on the exchange.
     */
    @SuppressWarnings("unchecked")
    private BiConsumer<Exchange, AsyncCallback> sendMessages() {
        return (exchange, callback) -> {
            final Object inputBody = exchange.getMessage().getBody();

            Mono<Void> sendMessageAsync;

            if (exchange.getMessage().getBody() instanceof Iterable) {
                sendMessageAsync
                        = serviceBusSenderOperations.sendMessages(convertBodyToList((Iterable<Object>) inputBody),
                                configurationOptionsProxy.getServiceBusTransactionContext(exchange));
            } else {
                sendMessageAsync = serviceBusSenderOperations.sendMessages(exchange.getMessage().getBody(String.class),
                        configurationOptionsProxy.getServiceBusTransactionContext(exchange));
            }

            subscribeToMono(sendMessageAsync, exchange, noop -> {
            }, callback);
        };
    }

    /**
     * Handler for the scheduleMessages operation. Mirrors sendMessages but
     * schedules at the configured enqueue time; the resulting sequence
     * numbers are written back as the message body.
     */
    @SuppressWarnings("unchecked")
    private BiConsumer<Exchange, AsyncCallback> scheduleMessages() {
        return (exchange, callback) -> {
            final Object inputBody = exchange.getMessage().getBody();

            Mono<List<Long>> scheduleMessagesAsync;

            if (exchange.getMessage().getBody() instanceof Iterable) {
                scheduleMessagesAsync
                        = serviceBusSenderOperations.scheduleMessages(convertBodyToList((Iterable<Object>) inputBody),
                                configurationOptionsProxy.getScheduledEnqueueTime(exchange),
                                configurationOptionsProxy.getServiceBusTransactionContext(exchange));
            } else {
                scheduleMessagesAsync
                        = serviceBusSenderOperations.scheduleMessages(exchange.getMessage().getBody(String.class),
                                configurationOptionsProxy.getScheduledEnqueueTime(exchange),
                                configurationOptionsProxy.getServiceBusTransactionContext(exchange));
            }

            subscribeToMono(scheduleMessagesAsync, exchange,
                    sequenceNumbers -> exchange.getMessage().setBody(sequenceNumbers), callback);
        };
    }

    // Converts each element of the Iterable body to String using the Camel
    // type-converter registry.
    private List<String> convertBodyToList(final Iterable<Object> inputBody) {
        return StreamSupport.stream(inputBody.spliterator(), false)
                .map(body -> getEndpoint().getCamelContext().getTypeConverter().convertTo(String.class, body))
                .collect(Collectors.toList());
    }

    /**
     * Subscribes to the reactive result. On success the resultsCallback runs,
     * then completion marks the exchange done; on error the exception is set
     * on the exchange. In both terminal cases callback.done(false) signals
     * asynchronous completion to Camel.
     */
    private <T> void subscribeToMono(
            final Mono<T> inputMono, final Exchange exchange, final Consumer<T> resultsCallback,
            final AsyncCallback callback) {
        inputMono
                .subscribe(resultsCallback, error -> {
                    // error but we continue
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Error processing async exchange with error: {}", error.getMessage());
                    }
                    exchange.setException(error);
                    callback.done(false);
                }, () -> {
                    // we are done from everything, so mark it as sync done
                    LOG.trace("All events with exchange have been sent successfully.");
                    callback.done(false);
                });
    }
}
/*
 * oxAuth is available under the MIT License (2008). See http://opensource.org/licenses/MIT for full text.
 *
 * Copyright (c) 2014, Gluu
 */
package org.xdi.oxauth.model.common;

import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.xdi.oxauth.model.authorize.JwtAuthorizationRequest;
import org.xdi.oxauth.model.authorize.ScopeChecker;
import org.xdi.oxauth.model.config.ConfigurationFactory;
import org.xdi.oxauth.model.federation.FederationTrust;
import org.xdi.oxauth.model.federation.FederationTrustStatus;
import org.xdi.oxauth.model.ldap.TokenLdap;
import org.xdi.oxauth.model.registration.Client;
import org.xdi.oxauth.service.FederationDataService;
import org.xdi.oxauth.service.ScopeService;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;

/**
 * Base class for all authorization-grant flavours. Holds the immutable grant
 * identity (user, client, grant type) plus the mutable token state (access,
 * refresh, id and long-lived tokens) in concurrent collections, so token maps
 * may be read/updated from multiple threads.
 *
 * @author Yuriy Zabrovarnyy
 * @author Javier Rojas Blum
 * @version 0.9, 08/14/2014
 */
public abstract class AbstractAuthorizationGrant implements IAuthorizationGrant {

    private static final Logger LOGGER = Logger.getLogger(AbstractAuthorizationGrant.class);

    private final User user;
    private final AuthorizationGrantType authorizationGrantType;
    private final Client client;
    // Scopes actually granted; CopyOnWriteArraySet keeps iteration safe while
    // checkScopesPolicy() rewrites the contents.
    private final Set<String> scopes;

    private String grantId;
    private JwtAuthorizationRequest jwtAuthorizationRequest;
    private Date authenticationTime;
    private TokenLdap tokenLdap;
    private AccessToken longLivedAccessToken;
    private IdToken idToken;
    private AuthorizationCode authorizationCode;
    private String nonce;
    private String acrValues;

    // Keyed by token code.
    protected final ConcurrentMap<String, AccessToken> accessTokens = new ConcurrentHashMap<String, AccessToken>();
    protected final ConcurrentMap<String, RefreshToken> refreshTokens = new ConcurrentHashMap<String, RefreshToken>();

    protected AbstractAuthorizationGrant(User user, AuthorizationGrantType authorizationGrantType, Client client,
                                         Date authenticationTime) {
        // Defensive copy: java.util.Date is mutable.
        this.authenticationTime = authenticationTime != null ? new Date(authenticationTime.getTime()) : null;
        this.user = user;
        this.authorizationGrantType = authorizationGrantType;
        this.client = client;
        this.scopes = new CopyOnWriteArraySet<String>();
        this.grantId = UUID.randomUUID().toString();
    }

    @Override
    public synchronized String getGrantId() {
        return grantId;
    }

    @Override
    public synchronized void setGrantId(String p_grantId) {
        grantId = p_grantId;
    }

    /**
     * Returns the {@link AuthorizationCode}.
     *
     * @return The authorization code.
     */
    @Override
    public AuthorizationCode getAuthorizationCode() {
        return authorizationCode;
    }

    /**
     * Sets the {@link AuthorizationCode}.
     *
     * @param authorizationCode The authorization code.
     */
    @Override
    public void setAuthorizationCode(AuthorizationCode authorizationCode) {
        this.authorizationCode = authorizationCode;
    }

    @Override
    public String getNonce() {
        return nonce;
    }

    @Override
    public void setNonce(String nonce) {
        this.nonce = nonce;
    }

    /**
     * Returns a list with all the issued refresh tokens codes.
     *
     * @return List with all the issued refresh tokens codes.
     */
    @Override
    public Set<String> getRefreshTokensCodes() {
        return refreshTokens.keySet();
    }

    /**
     * Returns a list with all the issued access tokens codes.
     *
     * @return List with all the issued access tokens codes.
     */
    @Override
    public Set<String> getAccessTokensCodes() {
        return accessTokens.keySet();
    }

    /**
     * Returns a list with all the issued access tokens.
     *
     * @return List with all the issued access tokens.
     */
    @Override
    public List<AccessToken> getAccessTokens() {
        // Snapshot copy; callers may mutate the returned list freely.
        return new ArrayList<AccessToken>(accessTokens.values());
    }

    @Override
    public void setScopes(Collection<String> scopes) {
        this.scopes.clear();
        this.scopes.addAll(scopes);
    }

    @Override
    public AccessToken getLongLivedAccessToken() {
        return longLivedAccessToken;
    }

    @Override
    public void setLongLivedAccessToken(AccessToken longLivedAccessToken) {
        this.longLivedAccessToken = longLivedAccessToken;
    }

    @Override
    public IdToken getIdToken() {
        return idToken;
    }

    @Override
    public void setIdToken(IdToken idToken) {
        this.idToken = idToken;
    }

    @Override
    public TokenLdap getTokenLdap() {
        return tokenLdap;
    }

    @Override
    public void setTokenLdap(TokenLdap p_tokenLdap) {
        this.tokenLdap = p_tokenLdap;
    }

    /**
     * Returns the resource owner.
     *
     * @return The resource owner.
     */
    @Override
    public User getUser() {
        return user;
    }

    public String getAcrValues() {
        return acrValues;
    }

    public void setAcrValues(String acrValues) {
        this.acrValues = acrValues;
    }

    /**
     * Checks the scopes policy configured according to the type of the
     * authorization grant to limit the issued token scopes.
     *
     * <p>Side effect: replaces this grant's scope set with the granted subset.
     *
     * @param requestedScopes A space-delimited list of values in which the
     *                        order of values does not matter.
     * @return A space-delimited list of scopes
     */
    @Override
    public String checkScopesPolicy(String requestedScopes) {
        this.scopes.clear();

        Set<String> grantedScopes = ScopeChecker.instance().checkScopesPolicy(client, requestedScopes);
        this.scopes.addAll(grantedScopes);

        final StringBuilder grantedScopesSb = new StringBuilder();
        for (String scope : scopes) {
            grantedScopesSb.append(" ").append(scope);
        }

        final String grantedScopesSt = grantedScopesSb.toString().trim();

        return grantedScopesSt;
    }

    @Override
    public AccessToken createAccessToken() {
        // Lifetime comes from the global configuration, not the client.
        int lifetime = ConfigurationFactory.instance().getConfiguration().getShortLivedAccessTokenLifetime();
        AccessToken accessToken = new AccessToken(lifetime);

        accessToken.setAuthMode(getAcrValues());

        return accessToken;
    }

    @Override
    public AccessToken createLongLivedAccessToken() {
        int lifetime = ConfigurationFactory.instance().getConfiguration().getLongLivedAccessTokenLifetime();
        AccessToken accessToken = new AccessToken(lifetime);

        accessToken.setAuthMode(getAcrValues());

        return accessToken;
    }

    @Override
    public RefreshToken createRefreshToken() {
        int lifetime = ConfigurationFactory.instance().getConfiguration().getRefreshTokenLifetime();
        RefreshToken refreshToken = new RefreshToken(lifetime);

        refreshToken.setAuthMode(getAcrValues());

        return refreshToken;
    }

    @Override
    public String getUserId() {
        if (user == null) {
            return null;
        }

        return user.getUserId();
    }

    @Override
    public String getUserDn() {
        if (user == null) {
            return null;
        }

        return user.getDn();
    }

    /**
     * Returns the {@link AuthorizationGrantType}.
     *
     * @return The authorization grant type.
     */
    @Override
    public AuthorizationGrantType getAuthorizationGrantType() {
        return authorizationGrantType;
    }

    /**
     * Returns the {@link org.xdi.oxauth.model.registration.Client}. An
     * application making protected resource requests on behalf of the resource
     * owner and with its authorization.
     *
     * @return The client.
     */
    @Override
    public Client getClient() {
        return client;
    }

    @Override
    public String getClientId() {
        if (client == null) {
            return null;
        }

        return client.getClientId();
    }

    @Override
    public String getClientDn() {
        if (client == null) {
            return null;
        }

        return client.getDn();
    }

    @Override
    public Date getAuthenticationTime() {
        // Defensive copy on the way out as well.
        return authenticationTime != null ? new Date(authenticationTime.getTime()) : null;
    }

    public void setAuthenticationTime(Date authenticationTime) {
        this.authenticationTime = authenticationTime != null ? new Date(authenticationTime.getTime()) : null;
    }

    /**
     * Returns a list of the scopes granted to the client.
     *
     * <p>NOTE(review): this exposes the internal (mutable) scope set directly;
     * callers can modify grant state through it — confirm this is intended.
     *
     * @return List of the scopes granted to the client.
     */
    @Override
    public Set<String> getScopes() {
        return scopes;
    }

    @Override
    public JwtAuthorizationRequest getJwtAuthorizationRequest() {
        return jwtAuthorizationRequest;
    }

    @Override
    public void setJwtAuthorizationRequest(JwtAuthorizationRequest p_jwtAuthorizationRequest) {
        jwtAuthorizationRequest = p_jwtAuthorizationRequest;
    }

    @Override
    public void setAccessTokens(List<AccessToken> accessTokens) {
        put(this.accessTokens, accessTokens);
    }

    // Replaces the map contents with the given tokens, keyed by token code.
    private static <T extends AbstractToken> void put(ConcurrentMap<String, T> p_map, List<T> p_list) {
        p_map.clear();
        if (p_list != null && !p_list.isEmpty()) {
            for (T t : p_list) {
                p_map.put(t.getCode(), t);
            }
        }
    }

    /**
     * Returns a list with all the issued refresh tokens.
     *
     * @return List with all the issued refresh tokens.
     */
    @Override
    public List<RefreshToken> getRefreshTokens() {
        return new ArrayList<RefreshToken>(refreshTokens.values());
    }

    @Override
    public void setRefreshTokens(List<RefreshToken> refreshTokens) {
        put(this.refreshTokens, refreshTokens);
    }

    /**
     * Gets the refresh token instance from the refresh token list given its
     * code.
     *
     * @param refreshTokenCode The code of the refresh token.
     * @return The refresh token instance or
     *         <code>null</code> if not found.
     */
    @Override
    public RefreshToken getRefreshToken(String refreshTokenCode) {
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Looking for the refresh token: " + refreshTokenCode + " for an authorization grant of type: "
                    + getAuthorizationGrantType());
        }

        return refreshTokens.get(refreshTokenCode);
    }

    /**
     * Gets the access token instance from the id token list or the access token
     * list given its code.
     *
     * @param tokenCode The code of the access token.
     * @return The access token instance or
     *         <code>null</code> if not found.
     */
    @Override
    public AbstractToken getAccessToken(String tokenCode) {
        // Check the id token and the long-lived token first; fall back to the
        // regular access-token map.
        final IdToken idToken = getIdToken();
        if (idToken != null) {
            if (idToken.getCode().equals(tokenCode)) {
                return idToken;
            }
        }
        final AccessToken longLivedAccessToken = getLongLivedAccessToken();
        if (longLivedAccessToken != null) {
            if (longLivedAccessToken.getCode().equals(tokenCode)) {
                return longLivedAccessToken;
            }
        }
        return accessTokens.get(tokenCode);
    }
}
package org.javaruntype.type;

import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import junit.framework.TestCase;

import org.javaruntype.exceptions.TypeValidationException;

/**
 * Tests {@link Types#forJavaLangReflectType} against the generic return types
 * of the sample methods m1..m9 declared below, both with and without explicit
 * type-variable substitutions, including bound-violation rejection.
 */
public class TestTypeFromJavaLangReflectType extends TestCase {

    // Sample methods: only their (generic) return types matter; bodies are stubs.
    public String m1() { return null; }
    public List<String> m2() { return null; }
    public <E> Map<String,List<E>> m3() { return null; }
    public <R extends Serializable, E> Map<R,List<? extends E>> m4() { return null; }
    public <R> R m5() { return null; }
    public <R extends Serializable> R m6() { return null; }
    public List<? extends String> m7() { return null; }
    public List<? extends Map<? extends Serializable, List<String>>> m8() { return null; }
    public <E> List<? extends Map<? extends Serializable, List<E>>> m9() { return null; }

    @SuppressWarnings("unused")
    public void testForJavaLangReflectType() throws Exception {

        final Method m1 = TestTypeFromJavaLangReflectType.class.getMethod("m1");
        final Method m2 = TestTypeFromJavaLangReflectType.class.getMethod("m2");
        final Method m3 = TestTypeFromJavaLangReflectType.class.getMethod("m3");
        final Method m4 = TestTypeFromJavaLangReflectType.class.getMethod("m4");
        final Method m5 = TestTypeFromJavaLangReflectType.class.getMethod("m5");
        final Method m6 = TestTypeFromJavaLangReflectType.class.getMethod("m6");
        final Method m7 = TestTypeFromJavaLangReflectType.class.getMethod("m7");
        final Method m8 = TestTypeFromJavaLangReflectType.class.getMethod("m8");
        final Method m9 = TestTypeFromJavaLangReflectType.class.getMethod("m9");

        final java.lang.reflect.Type tm1 = m1.getGenericReturnType();
        final java.lang.reflect.Type tm2 = m2.getGenericReturnType();
        final java.lang.reflect.Type tm3 = m3.getGenericReturnType();
        final java.lang.reflect.Type tm4 = m4.getGenericReturnType();
        final java.lang.reflect.Type tm5 = m5.getGenericReturnType();
        final java.lang.reflect.Type tm6 = m6.getGenericReturnType();
        final java.lang.reflect.Type tm7 = m7.getGenericReturnType();
        final java.lang.reflect.Type tm8 = m8.getGenericReturnType();
        final java.lang.reflect.Type tm9 = m9.getGenericReturnType();

        // Expected results, parsed from canonical type names.
        final Type<?> et1 = Types.forName("java.lang.String");
        final Type<?> et2 = Types.forName("java.util.List<java.lang.String>");
        final Type<?> et3a = Types.forName("java.util.Map<String,List<String>>");
        final Type<?> et3b = Types.forName("java.util.Map<String,List<Integer>>");
        final Type<?> et4a = Types.forName("java.util.Map<Integer,java.util.List<? extends Serializable>>");
        final Type<?> et4b = Types.forName("java.util.Map<Number,java.util.List<? extends Integer>>");
        final Type<?> et4c = Types.forName("java.util.Map<Integer,java.util.List<? extends java.util.List<? extends Number>>>");
        final Type<?> et4d = Types.forName("java.util.Map<java.util.List<?>,java.util.List<? extends java.util.List<? extends Number>>>");
        final Type<?> et5a = Types.forName("java.lang.String");
        final Type<?> et5b = Types.forName("java.lang.Integer");
        final Type<?> et5c = Types.forName("java.io.Serializable");
        final Type<?> et5d = Types.forName("java.util.List<?>");
        final Type<?> et6a = Types.forName("java.lang.String");
        final Type<?> et6b = Types.forName("java.io.Serializable");
        final Type<?> et6c = Types.forName("java.util.List<?>");
        final Type<?> et7 = Types.forName("java.util.List<? extends java.lang.String>");
        final Type<?> et8 = Types.forName("java.util.List<? extends java.util.Map<? extends java.io.Serializable, java.util.List<java.lang.String>>>");
        final Type<?> et9a = Types.forName("java.util.List<? extends java.util.Map<? extends java.io.Serializable, java.util.List<java.lang.String>>>");
        final Type<?> et9b = Types.forName("java.util.List<? extends java.util.Map<? extends java.io.Serializable, java.util.List<java.lang.Integer>>>");
        final Type<?> et9c = Types.forName("java.util.List<? extends java.util.Map<? extends java.io.Serializable, java.util.List<java.util.List<Integer>>>>");

        // m1/m2: concrete return types, no type variables involved.
        final Type<?> jt1 = Types.forJavaLangReflectType(tm1);
        assertEquals(et1, jt1);

        final Type<?> jt2 = Types.forJavaLangReflectType(tm2);
        assertEquals(et2, jt2);

        // m3: substitute the unbounded variable E.
        final Map<String,Type<?>> jt3av = new HashMap<String, Type<?>>();
        jt3av.put("E", Types.STRING);
        final Type<?> jt3a = Types.forJavaLangReflectType(tm3, jt3av);
        assertEquals(et3a, jt3a);

        final Map<String,Type<?>> jt3bv = new HashMap<String, Type<?>>();
        jt3bv.put("E", Types.INTEGER);
        final Type<?> jt3b = Types.forJavaLangReflectType(tm3, jt3bv);
        assertEquals(et3b, jt3b);

        // m4: R is bounded by Serializable; E is unbounded.
        final Map<String,Type<?>> jt4av = new HashMap<String, Type<?>>();
        jt4av.put("R", Types.INTEGER);
        jt4av.put("E", Types.SERIALIZABLE);
        final Type<?> jt4a = Types.forJavaLangReflectType(tm4, jt4av);
        assertEquals(et4a, jt4a);

        final Map<String,Type<?>> jt4bv = new HashMap<String, Type<?>>();
        jt4bv.put("R", Types.NUMBER);
        jt4bv.put("E", Types.INTEGER);
        final Type<?> jt4b = Types.forJavaLangReflectType(tm4, jt4bv);
        assertEquals(et4b, jt4b);

        final Map<String,Type<?>> jt4cv = new HashMap<String, Type<?>>();
        jt4cv.put("R", Types.INTEGER);
        jt4cv.put("E", Types.listOf(TypeParameters.forExtendsType(Types.NUMBER)));
        final Type<?> jt4c = Types.forJavaLangReflectType(tm4, jt4cv);
        assertEquals(et4c, jt4c);

        // R = List<?> violates the "R extends Serializable" bound and must be rejected.
        final Map<String,Type<?>> jt4dv = new HashMap<String, Type<?>>();
        jt4dv.put("R", Types.LIST_OF_UNKNOWN);
        jt4dv.put("E", Types.listOf(TypeParameters.forExtendsType(Types.NUMBER)));
        try {
            Types.forJavaLangReflectType(tm4, jt4dv);
            fail("Expected TypeValidationException: List<?> does not satisfy R extends Serializable");
        } catch (TypeValidationException expected) {
            // expected: bound violation rejected
        }

        // m5: unbounded R accepts any substitution.
        final Map<String,Type<?>> jt5av = new HashMap<String, Type<?>>();
        jt5av.put("R", Types.STRING);
        final Type<?> jt5a = Types.forJavaLangReflectType(tm5, jt5av);
        assertEquals(et5a, jt5a);

        final Map<String,Type<?>> jt5bv = new HashMap<String, Type<?>>();
        jt5bv.put("R", Types.INTEGER);
        final Type<?> jt5b = Types.forJavaLangReflectType(tm5, jt5bv);
        assertEquals(et5b, jt5b);

        final Map<String,Type<?>> jt5cv = new HashMap<String, Type<?>>();
        jt5cv.put("R", Types.SERIALIZABLE);
        final Type<?> jt5c = Types.forJavaLangReflectType(tm5, jt5cv);
        assertEquals(et5c, jt5c);

        final Map<String,Type<?>> jt5dv = new HashMap<String, Type<?>>();
        jt5dv.put("R", Types.LIST_OF_UNKNOWN);
        final Type<?> jt5d = Types.forJavaLangReflectType(tm5, jt5dv);
        assertEquals(et5d, jt5d);

        // m6: R is again bounded by Serializable.
        final Map<String,Type<?>> jt6av = new HashMap<String, Type<?>>();
        jt6av.put("R", Types.STRING);
        final Type<?> jt6a = Types.forJavaLangReflectType(tm6, jt6av);
        assertEquals(et6a, jt6a);

        final Map<String,Type<?>> jt6bv = new HashMap<String, Type<?>>();
        jt6bv.put("R", Types.SERIALIZABLE);
        final Type<?> jt6b = Types.forJavaLangReflectType(tm6, jt6bv);
        assertEquals(et6b, jt6b);

        final Map<String,Type<?>> jt6cv = new HashMap<String, Type<?>>();
        jt6cv.put("R", Types.LIST_OF_UNKNOWN);
        try {
            Types.forJavaLangReflectType(tm6, jt6cv);
            fail("Expected TypeValidationException: List<?> does not satisfy R extends Serializable");
        } catch (TypeValidationException expected) {
            // expected: bound violation rejected
        }

        // m7/m8: wildcard-bearing return types, no substitutions needed.
        final Type<?> jt7 = Types.forJavaLangReflectType(tm7);
        assertEquals(et7, jt7);

        final Type<?> jt8 = Types.forJavaLangReflectType(tm8);
        assertEquals(et8, jt8);

        // m9: wildcards combined with substitution of E.
        final Map<String,Type<?>> jt9av = new HashMap<String, Type<?>>();
        jt9av.put("E", Types.STRING);
        final Type<?> jt9a = Types.forJavaLangReflectType(tm9, jt9av);
        assertEquals(et9a, jt9a);

        final Map<String,Type<?>> jt9bv = new HashMap<String, Type<?>>();
        jt9bv.put("E", Types.INTEGER);
        final Type<?> jt9b = Types.forJavaLangReflectType(tm9, jt9bv);
        assertEquals(et9b, jt9b);

        final Map<String,Type<?>> jt9cv = new HashMap<String, Type<?>>();
        jt9cv.put("E", Types.listOf(Types.INTEGER));
        final Type<?> jt9c = Types.forJavaLangReflectType(tm9, jt9cv);
        assertEquals(et9c, jt9c);
    }

}
/*
 * Copyright (C) 2007 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import static com.google.common.base.Preconditions.checkArgument;
import static org.junit.contrib.truth.Truth.ASSERT;

import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.DerivedComparable;
import com.google.common.collect.testing.Helpers;
import com.google.common.collect.testing.NavigableMapTestSuiteBuilder;
import com.google.common.collect.testing.NavigableSetTestSuiteBuilder;
import com.google.common.collect.testing.SampleElements;
import com.google.common.collect.testing.TestSortedMapGenerator;
import com.google.common.collect.testing.TestStringSetGenerator;
import com.google.common.collect.testing.TestStringSortedSetGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.MapFeature;
import com.google.common.collect.testing.google.SortedSetMultimapTestSuiteBuilder;
import com.google.common.collect.testing.google.TestStringSetMultimapGenerator;
import com.google.common.testing.SerializableTester;

import junit.framework.Test;
import junit.framework.TestSuite;

import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;

/**
 * Unit tests for {@code TreeMultimap} with natural ordering.
 *
 * @author Jared Levy
 */
@GwtCompatible(emulated = true)
public class TreeMultimapNaturalTest extends AbstractSetMultimapTest {

  @GwtIncompatible("suite")
  public static Test suite() {
    TestSuite suite = new TestSuite();
    // TODO(user): should we force TreeMultimap to be more thorough about checking nulls?
    // Exercises the full SetMultimap contract with null-friendly key/value orderings.
    suite.addTest(SortedSetMultimapTestSuiteBuilder.using(new TestStringSetMultimapGenerator() {
        @Override
        protected SetMultimap<String, String> create(Entry<String, String>[] entries) {
          SetMultimap<String, String> multimap = TreeMultimap.create(
              Ordering.natural().nullsFirst(), Ordering.natural().nullsFirst());
          for (Entry<String, String> entry : entries) {
            multimap.put(entry.getKey(), entry.getValue());
          }
          return multimap;
        }

        @Override
        public Iterable<Entry<String, String>> order(List<Entry<String, String>> insertionOrder) {
          // Expected iteration order: sorted by key, then by value, nulls first.
          return new Ordering<Entry<String, String>>() {
            @Override
            public int compare(Entry<String, String> left, Entry<String, String> right) {
              return ComparisonChain.start()
                  .compare(left.getKey(), right.getKey(), Ordering.natural().nullsFirst())
                  .compare(left.getValue(), right.getValue(), Ordering.natural().nullsFirst())
                  .result();
            }
          }.sortedCopy(insertionOrder);
        }
      })
      .named("TreeMultimap nullsFirst")
      .withFeatures(
          MapFeature.ALLOWS_NULL_KEYS,
          MapFeature.ALLOWS_NULL_VALUES,
          MapFeature.GENERAL_PURPOSE,
          MapFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
          CollectionFeature.KNOWN_ORDER,
          CollectionFeature.SERIALIZABLE,
          CollectionSize.ANY)
      .createTestSuite());
    // Tests the keySet() view as a NavigableSet.
    suite.addTest(NavigableSetTestSuiteBuilder.using(new TestStringSortedSetGenerator() {
        @Override
        protected NavigableSet<String> create(String[] elements) {
          TreeMultimap<String, Integer> multimap = TreeMultimap.create(
              Ordering.natural().nullsFirst(),
              Ordering.natural());
          for (int i = 0; i < elements.length; i++) {
            multimap.put(elements[i], i);
          }
          return multimap.keySet();
        }

        @Override
        public List<String> order(List<String> insertionOrder) {
          return Ordering.natural().nullsFirst().sortedCopy(insertionOrder);
        }
      })
      .named("TreeMultimap.keySet")
      .withFeatures(
          CollectionFeature.ALLOWS_NULL_VALUES,
          CollectionFeature.SUPPORTS_REMOVE,
          CollectionFeature.KNOWN_ORDER,
          CollectionSize.ANY)
      .createTestSuite());
    // Tests the asMap() view as a NavigableMap.
    suite.addTest(NavigableMapTestSuiteBuilder.using(
        new TestSortedMapGenerator<String, Collection<String>>() {
          @Override
          public String[] createKeyArray(int length) {
            return new String[length];
          }

          @SuppressWarnings("unchecked")
          @Override
          public Collection<String>[] createValueArray(int length) {
            return new Collection[length];
          }

          @Override
          public SampleElements<Entry<String, Collection<String>>> samples() {
            return new SampleElements<Entry<String, Collection<String>>>(
                Helpers.mapEntry("a", (Collection<String>) ImmutableSortedSet.of("alex")),
                Helpers.mapEntry("b", (Collection<String>) ImmutableSortedSet.of("bob", "bagel")),
                Helpers.mapEntry("c", (Collection<String>) ImmutableSortedSet.of("carl", "carol")),
                Helpers.mapEntry("d", (Collection<String>) ImmutableSortedSet.of("david", "dead")),
                Helpers.mapEntry("e", (Collection<String>) ImmutableSortedSet.of("eric", "elaine")));
          }

          @SuppressWarnings("unchecked")
          @Override
          public Entry<String, Collection<String>>[] createArray(int length) {
            return new Entry[length];
          }

          @Override
          public Iterable<Entry<String, Collection<String>>> order(
              List<Entry<String, Collection<String>>> insertionOrder) {
            return new Ordering<Entry<String, ?>>() {
              @Override
              public int compare(Entry<String, ?> left, Entry<String, ?> right) {
                return left.getKey().compareTo(right.getKey());
              }
            }.sortedCopy(insertionOrder);
          }

          @Override
          public NavigableMap<String, Collection<String>> create(Object... elements) {
            TreeMultimap<String, String> multimap = TreeMultimap.create();
            for (Object o : elements) {
              @SuppressWarnings("unchecked")
              Entry<String, Collection<String>> entry = (Entry<String, Collection<String>>) o;
              checkArgument(!multimap.containsKey(entry.getKey()));
              multimap.putAll(entry.getKey(), entry.getValue());
            }
            return multimap.asMap();
          }

          // "--"/"~~" prefixes sort below/above every sample key.
          @Override
          public Entry<String, Collection<String>> belowSamplesLesser() {
            return Helpers.mapEntry("-- a", (Collection<String>) ImmutableSortedSet.of("--below"));
          }

          @Override
          public Entry<String, Collection<String>> belowSamplesGreater() {
            return Helpers.mapEntry("-- b", (Collection<String>) ImmutableSortedSet.of("--below"));
          }

          @Override
          public Entry<String, Collection<String>> aboveSamplesLesser() {
            return Helpers.mapEntry("~~ b", (Collection<String>) ImmutableSortedSet.of("~above"));
          }

          @Override
          public Entry<String, Collection<String>> aboveSamplesGreater() {
            return Helpers.mapEntry("~~ c", (Collection<String>) ImmutableSortedSet.of("~above"));
          }
        })
        .named("TreeMultimap.asMap")
        .withFeatures(
            MapFeature.SUPPORTS_REMOVE,
            MapFeature.REJECTS_DUPLICATES_AT_CREATION,
            CollectionFeature.KNOWN_ORDER,
            CollectionSize.ANY)
        .createTestSuite());
    // Tests the get(key) view as a NavigableSet.
    suite.addTest(NavigableSetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override
        protected Set<String> create(String[] elements) {
          TreeMultimap<Integer, String> multimap = TreeMultimap.create(
              Ordering.natural(), Ordering.natural().nullsFirst());
          multimap.putAll(1, Arrays.asList(elements));
          return multimap.get(1);
        }

        @Override
        public List<String> order(List<String> insertionOrder) {
          return Ordering.natural().nullsFirst().sortedCopy(insertionOrder);
        }
      })
      .named("TreeMultimap.get")
      .withFeatures(
          CollectionFeature.ALLOWS_NULL_VALUES,
          CollectionFeature.GENERAL_PURPOSE,
          CollectionFeature.KNOWN_ORDER,
          CollectionSize.ANY)
      .createTestSuite());
    // Tests the value collection reached through asMap().entrySet().
    suite.addTest(NavigableSetTestSuiteBuilder.using(new TestStringSetGenerator() {
        @Override
        protected Set<String> create(String[] elements) {
          TreeMultimap<Integer, String> multimap = TreeMultimap.create(
              Ordering.natural(), Ordering.natural().nullsFirst());
          multimap.putAll(1, Arrays.asList(elements));
          return (Set<String>) multimap.asMap().entrySet().iterator().next().getValue();
        }

        @Override
        public List<String> order(List<String> insertionOrder) {
          return Ordering.natural().nullsFirst().sortedCopy(insertionOrder);
        }
      })
      .named("TreeMultimap.asMap.entrySet collection")
      .withFeatures(
          CollectionFeature.ALLOWS_NULL_VALUES,
          CollectionFeature.GENERAL_PURPOSE,
          CollectionFeature.KNOWN_ORDER,
          CollectionSize.ONE,
          CollectionSize.SEVERAL)
      .createTestSuite());
    suite.addTestSuite(TreeMultimapNaturalTest.class);
    return suite;
  }

  @Override protected Multimap<String, Integer> create() {
    return TreeMultimap.create();
  }

  /* Null keys and values aren't supported. */
  @Override protected String nullKey() {
    return "null";
  }

  @Override protected Integer nullValue() {
    return 42;
  }

  /**
   * Create and populate a {@code TreeMultimap} with the natural ordering of
   * keys and values.
   */
  private TreeMultimap<String, Integer> createPopulate() {
    TreeMultimap<String, Integer> multimap = TreeMultimap.create();
    multimap.put("google", 2);
    multimap.put("google", 6);
    multimap.put("foo", 3);
    multimap.put("foo", 1);
    multimap.put("foo", 7);
    multimap.put("tree", 4);
    multimap.put("tree", 0);
    return multimap;
  }

  public void testToString() {
    assertEquals("{bar=[1, 2, 3], foo=[-1, 1, 2, 3, 4]}",
        createSample().toString());
  }

  public void testOrderedGet() {
    TreeMultimap<String, Integer> multimap = createPopulate();
    ASSERT.that(multimap.get("foo")).hasContentsInOrder(1, 3, 7);
    ASSERT.that(multimap.get("google")).hasContentsInOrder(2, 6);
    ASSERT.that(multimap.get("tree")).hasContentsInOrder(0, 4);
  }

  public void testOrderedKeySet() {
    TreeMultimap<String, Integer> multimap = createPopulate();
    ASSERT.that(multimap.keySet()).hasContentsInOrder("foo", "google", "tree");
  }

  public void testOrderedAsMapEntries() {
    TreeMultimap<String, Integer> multimap = createPopulate();
    Iterator<Map.Entry<String, Collection<Integer>>> iterator =
        multimap.asMap().entrySet().iterator();
    Map.Entry<String, Collection<Integer>> entry = iterator.next();
    assertEquals("foo", entry.getKey());
    ASSERT.that(entry.getValue()).hasContentsAnyOrder(1, 3, 7);
    entry = iterator.next();
    assertEquals("google", entry.getKey());
    ASSERT.that(entry.getValue()).hasContentsAnyOrder(2, 6);
    entry = iterator.next();
    assertEquals("tree", entry.getKey());
    ASSERT.that(entry.getValue()).hasContentsAnyOrder(0, 4);
  }

  public void testOrderedEntries() {
    TreeMultimap<String, Integer> multimap = createPopulate();
    ASSERT.that(multimap.entries()).hasContentsInOrder(
        Maps.immutableEntry("foo", 1),
        Maps.immutableEntry("foo", 3),
        Maps.immutableEntry("foo", 7),
        Maps.immutableEntry("google", 2),
        Maps.immutableEntry("google", 6),
        Maps.immutableEntry("tree", 0),
        Maps.immutableEntry("tree", 4));
  }

  public void testOrderedValues() {
    TreeMultimap<String, Integer> multimap = createPopulate();
    ASSERT.that(multimap.values()).hasContentsInOrder(
        1, 3, 7, 2, 6, 0, 4);
  }

  public void testMultimapConstructor() {
    Multimap<String, Integer> multimap = createSample();
    TreeMultimap<String, Integer> copy = TreeMultimap.create(multimap);
    assertEquals(multimap, copy);
  }

  private static final Comparator<Double> KEY_COMPARATOR =
      Ordering.natural();

  private static final Comparator<Double> VALUE_COMPARATOR =
      Ordering.natural().reverse().nullsFirst();

  /**
   * Test that creating one TreeMultimap from another does not copy the
   * comparators from the source TreeMultimap.
   */
  public void testCreateFromTreeMultimap() {
    Multimap<Double, Double> tree = TreeMultimap.create(KEY_COMPARATOR, VALUE_COMPARATOR);
    tree.put(1.0, 2.0);
    tree.put(2.0, 3.0);
    tree.put(3.0, 4.0);
    tree.put(4.0, 5.0);
    TreeMultimap<Double, Double> copyFromTree = TreeMultimap.create(tree);
    assertEquals(tree, copyFromTree);
    assertSame(Ordering.natural(), copyFromTree.keyComparator());
    assertSame(Ordering.natural(), copyFromTree.valueComparator());
    assertSame(Ordering.natural(), copyFromTree.get(1.0).comparator());
  }

  /**
   * Test that creating one TreeMultimap from a non-TreeMultimap
   * results in natural ordering.
   */
  public void testCreateFromHashMultimap() {
    Multimap<Double, Double> hash = HashMultimap.create();
    hash.put(1.0, 2.0);
    hash.put(2.0, 3.0);
    hash.put(3.0, 4.0);
    hash.put(4.0, 5.0);
    TreeMultimap<Double, Double> copyFromHash = TreeMultimap.create(hash);
    assertEquals(hash, copyFromHash);
    assertEquals(Ordering.natural(), copyFromHash.keyComparator());
    assertEquals(Ordering.natural(), copyFromHash.valueComparator());
  }

  /**
   * Test that creating one TreeMultimap from a SortedSetMultimap uses natural
   * ordering.
*/ public void testCreateFromSortedSetMultimap() { SortedSetMultimap<Double, Double> tree = TreeMultimap.create(KEY_COMPARATOR, VALUE_COMPARATOR); tree.put(1.0, 2.0); tree.put(2.0, 3.0); tree.put(3.0, 4.0); tree.put(4.0, 5.0); SortedSetMultimap<Double, Double> sorted = Multimaps.unmodifiableSortedSetMultimap(tree); TreeMultimap<Double, Double> copyFromSorted = TreeMultimap.create(sorted); assertEquals(tree, copyFromSorted); assertSame(Ordering.natural(), copyFromSorted.keyComparator()); assertSame(Ordering.natural(), copyFromSorted.valueComparator()); assertSame(Ordering.natural(), copyFromSorted.get(1.0).comparator()); } public void testComparators() { TreeMultimap<String, Integer> multimap = TreeMultimap.create(); assertEquals(Ordering.natural(), multimap.keyComparator()); assertEquals(Ordering.natural(), multimap.valueComparator()); } @GwtIncompatible("SerializableTester") public void testExplicitComparatorSerialization() { TreeMultimap<String, Integer> multimap = createPopulate(); TreeMultimap<String, Integer> copy = SerializableTester.reserializeAndAssert(multimap); ASSERT.that(copy.values()).hasContentsInOrder(1, 3, 7, 2, 6, 0, 4); ASSERT.that(copy.keySet()).hasContentsInOrder("foo", "google", "tree"); assertEquals(multimap.keyComparator(), copy.keyComparator()); assertEquals(multimap.valueComparator(), copy.valueComparator()); } @GwtIncompatible("SerializableTester") public void testTreeMultimapDerived() { TreeMultimap<DerivedComparable, DerivedComparable> multimap = TreeMultimap.create(); assertEquals(ImmutableMultimap.of(), multimap); multimap.put(new DerivedComparable("foo"), new DerivedComparable("f")); multimap.put(new DerivedComparable("foo"), new DerivedComparable("o")); multimap.put(new DerivedComparable("foo"), new DerivedComparable("o")); multimap.put(new DerivedComparable("bar"), new DerivedComparable("b")); multimap.put(new DerivedComparable("bar"), new DerivedComparable("a")); multimap.put(new DerivedComparable("bar"), new 
DerivedComparable("r")); ASSERT.that(multimap.keySet()).hasContentsInOrder( new DerivedComparable("bar"), new DerivedComparable("foo")); ASSERT.that(multimap.values()).hasContentsInOrder( new DerivedComparable("a"), new DerivedComparable("b"), new DerivedComparable("r"), new DerivedComparable("f"), new DerivedComparable("o")); assertEquals(Ordering.natural(), multimap.keyComparator()); assertEquals(Ordering.natural(), multimap.valueComparator()); SerializableTester.reserializeAndAssert(multimap); } @GwtIncompatible("SerializableTester") public void testTreeMultimapNonGeneric() { TreeMultimap<LegacyComparable, LegacyComparable> multimap = TreeMultimap.create(); assertEquals(ImmutableMultimap.of(), multimap); multimap.put(new LegacyComparable("foo"), new LegacyComparable("f")); multimap.put(new LegacyComparable("foo"), new LegacyComparable("o")); multimap.put(new LegacyComparable("foo"), new LegacyComparable("o")); multimap.put(new LegacyComparable("bar"), new LegacyComparable("b")); multimap.put(new LegacyComparable("bar"), new LegacyComparable("a")); multimap.put(new LegacyComparable("bar"), new LegacyComparable("r")); ASSERT.that(multimap.keySet()).hasContentsInOrder( new LegacyComparable("bar"), new LegacyComparable("foo")); ASSERT.that(multimap.values()).hasContentsInOrder( new LegacyComparable("a"), new LegacyComparable("b"), new LegacyComparable("r"), new LegacyComparable("f"), new LegacyComparable("o")); assertEquals(Ordering.natural(), multimap.keyComparator()); assertEquals(Ordering.natural(), multimap.valueComparator()); SerializableTester.reserializeAndAssert(multimap); } public void testTreeMultimapAsMapSorted() { TreeMultimap<String, Integer> multimap = createPopulate(); SortedMap<String, Collection<Integer>> asMap = multimap.asMap(); assertEquals(Ordering.natural(), asMap.comparator()); assertEquals("foo", asMap.firstKey()); assertEquals("tree", asMap.lastKey()); Set<Integer> fooValues = ImmutableSet.of(1, 3, 7); Set<Integer> googleValues = 
ImmutableSet.of(2, 6); Set<Integer> treeValues = ImmutableSet.of(4, 0); assertEquals(ImmutableMap.of("google", googleValues, "tree", treeValues), asMap.tailMap("g")); assertEquals(ImmutableMap.of("google", googleValues, "foo", fooValues), asMap.headMap("h")); assertEquals(ImmutableMap.of("google", googleValues), asMap.subMap("g", "h")); } public void testTailSetClear() { TreeMultimap<String, Integer> multimap = TreeMultimap.create(); multimap.put("a", 1); multimap.put("a", 11); multimap.put("b", 2); multimap.put("c", 3); multimap.put("d", 4); multimap.put("e", 5); multimap.put("e", 55); multimap.keySet().tailSet("d").clear(); assertEquals(ImmutableSet.of("a", "b", "c"), multimap.keySet()); assertEquals(4, multimap.size()); assertEquals(4, multimap.values().size()); assertEquals(4, multimap.keys().size()); } @GwtIncompatible("reflection") public void testKeySetBridgeMethods() { for (Method m : TreeMultimap.class.getMethods()) { if (m.getName().equals("keySet") && m.getReturnType().equals(SortedSet.class)) { return; } } fail("No bridge method found"); } @GwtIncompatible("reflection") public void testAsMapBridgeMethods() { for (Method m : TreeMultimap.class.getMethods()) { if (m.getName().equals("asMap") && m.getReturnType().equals(SortedMap.class)) { return; } } } @GwtIncompatible("reflection") public void testGetBridgeMethods() { for (Method m : TreeMultimap.class.getMethods()) { if (m.getName().equals("get") && m.getReturnType().equals(SortedSet.class)) { return; } } fail("No bridge method found"); } }
package com.danielkueffer.filehosting.rest;

import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

import javax.ejb.EJB;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;

import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput;

import com.danielkueffer.filehosting.service.ConfigurationService;
import com.danielkueffer.filehosting.service.FileService;
import com.danielkueffer.filehosting.service.UserService;

/**
 * The file rest service. Exposes endpoints to list, upload, rename, delete
 * and download files and to create folders for the current user.
 *
 * @author dkueffer
 */
@Path("file")
public class FileResource implements Serializable {

	private static final long serialVersionUID = -7978847135919069960L;

	@EJB
	FileService fileService;

	@EJB
	UserService userService;

	@EJB
	ConfigurationService configurationService;

	@Inject
	HttpServletRequest request;

	/**
	 * Get all files from current user
	 *
	 * @return JSON list of the current user's files
	 */
	@GET
	@Produces(MediaType.APPLICATION_JSON)
	public String getAll() {
		return this.fileService.getFilesFromCurrentUser();
	}

	/**
	 * Get all files from current user under the specified parent directory
	 *
	 * @param parent the id of the parent directory
	 * @return JSON list of the files below the parent directory
	 */
	@GET
	@Path("{parent}")
	@Produces(MediaType.APPLICATION_JSON)
	public String getAllFromParent(@PathParam("parent") int parent) {
		return this.fileService.getFilesFromCurrentUser(parent);
	}

	/**
	 * Upload and create a file.
	 *
	 * @param contentLength the Content-Length header of the request
	 * @param input         the multipart form data
	 * @return 200 on success, 500 if the upload failed, or a temporary
	 *         redirect to the file list for legacy IE form uploads
	 */
	@POST
	@Path("upload")
	@Consumes(MediaType.MULTIPART_FORM_DATA + "; charset=UTF-8")
	public Response uploadFile(
			@HeaderParam("Content-Length") long contentLength,
			MultipartFormDataInput input) {
		int parent = 0;
		String filename = "";
		boolean ieForm = false;
		long lastModified = 0;

		try {
			parent = Integer.valueOf(input.getFormDataMap().get("parent")
					.get(0).getBodyAsString());
			filename = input.getFormDataMap().get("my-filename").get(0)
					.getBodyAsString();

			// Marker field sent only by the legacy IE (< 10) upload form.
			if (input.getFormDataMap().get("ie-form") != null) {
				ieForm = true;
			}

			if (input.getFormDataMap().get("last-modified") != null) {
				lastModified = Long.valueOf(input.getFormDataMap()
						.get("last-modified").get(0).getBodyAsString());
			}
		} catch (IOException e) {
			// Best-effort: the upload proceeds with default metadata when the
			// form fields cannot be read.
			// NOTE(review): consider a proper logger instead of printStackTrace.
			e.printStackTrace();
		}

		boolean uploaded = this.fileService.uploadFiles(
				input.getFormDataMap().get("file"), parent, filename,
				lastModified, contentLength);

		if (!uploaded) {
			return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
					.build();
		}

		// Upload send by Internet Explorer below v.10. Redirect to the file
		// list
		if (ieForm) {
			try {
				URI uri = new URL(this.request.getScheme() + "://"
						+ this.request.getServerName() + ":"
						+ this.request.getServerPort()
						+ this.request.getContextPath()).toURI();

				return Response.temporaryRedirect(uri).build();
			} catch (MalformedURLException e) {
				e.printStackTrace();
			} catch (URISyntaxException e) {
				e.printStackTrace();
			}
		}

		return Response.ok().build();
	}

	/**
	 * Update the filename
	 *
	 * @param fileName the new file name
	 * @param id       the id of the file to rename
	 * @return 200 on success
	 */
	@POST
	@Path("update")
	public Response updateFileName(@FormParam("fileName") String fileName,
			@FormParam("id") int id) {
		this.fileService.updateFileName(fileName, id);
		return Response.ok().build();
	}

	/**
	 * Delete a file
	 *
	 * @param filePath the path of the file to delete
	 * @return 200 on success, 500 on failure
	 */
	@DELETE
	@Path("{filePath:.*}")
	public Response deleteFile(@PathParam("filePath") String filePath) {
		boolean deleted = this.fileService.deleteFile(filePath, false);

		if (!deleted) {
			return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
					.build();
		}

		return Response.ok().build();
	}

	/**
	 * Delete a file which was deleted on the client
	 *
	 * @param filePath the path of the file to delete
	 * @return 200 on success, 500 on failure
	 */
	@DELETE
	@Path("client/{filePath:.*}")
	public Response deleteFileFromClient(@PathParam("filePath") String filePath) {
		boolean deleted = this.fileService.deleteFile(filePath, true);

		if (!deleted) {
			return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
					.build();
		}

		return Response.ok().build();
	}

	/**
	 * Download a file
	 *
	 * @param filePath the path of the file to download
	 * @return the file as an octet stream, or 404 if it does not exist
	 */
	@GET
	@Path("download/{filePath:.*}")
	@Produces(MediaType.APPLICATION_OCTET_STREAM + "; charset=UTF-8")
	public Response downloadFile(@PathParam("filePath") String filePath) {
		File file = this.fileService.getDownloadFile(filePath);

		if (file == null) {
			return Response.status(Response.Status.NOT_FOUND).build();
		}

		// NOTE(review): the file name is embedded unescaped in the header; a
		// name containing a double quote would break the Content-Disposition
		// value. Confirm whether file names are sanitized upstream.
		ResponseBuilder rb = Response.ok(file).header("Content-Disposition",
				"attachment; filename=\"" + file.getName() + "\"");
		return rb.build();
	}

	/**
	 * Create a folder
	 *
	 * @param folderName the name of the folder to create
	 * @param parent     the id of the parent directory
	 * @return 200 on success
	 */
	@POST
	@Path("folder/add")
	public Response createFolder(@FormParam("folder") String folderName,
			@FormParam("parent") int parent) {
		// FIX: parameter was misspelled "parrent"; the external form-param
		// name ("parent") is unchanged.
		this.fileService.createFolder(folderName, parent);
		return Response.ok().build();
	}

	/**
	 * Get the deleted files from current user
	 *
	 * @return JSON list of the current user's deleted files
	 */
	@GET
	@Path("deleted")
	@Produces(MediaType.APPLICATION_JSON)
	public String getFilesDeleted() {
		return this.fileService.getDeletedFilesFromCurrentUser();
	}

	/**
	 * Update the deleted files
	 *
	 * @param deleted must be the string "true" for the update to run
	 * @return 200 on success, 500 otherwise
	 */
	@POST
	@Path("deleted")
	public Response updateFilesDeleted(@FormParam("deleted") String deleted) {
		// FIX: "true".equals(deleted) is null-safe; the previous
		// deleted.equals("true") threw a NullPointerException when the form
		// parameter was missing.
		if ("true".equals(deleted)) {
			this.fileService.updateDeletedFiles();
			return Response.ok().build();
		} else {
			return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
					.build();
		}
	}
}
package com.fsck.k9.activity.compose;


import java.util.Arrays;
import java.util.List;

import android.app.LoaderManager;
import android.content.Context;
import android.content.Intent;
import android.os.ParcelFileDescriptor;

import com.fsck.k9.Account;
import com.fsck.k9.QMail;
import com.fsck.k9.K9RobolectricTestRunner;
import com.fsck.k9.activity.compose.RecipientMvpView.CryptoSpecialModeDisplayType;
import com.fsck.k9.activity.compose.RecipientMvpView.CryptoStatusDisplayType;
import com.fsck.k9.activity.compose.RecipientPresenter.CryptoMode;
import com.fsck.k9.helper.ReplyToParser;
import com.fsck.k9.helper.ReplyToParser.ReplyToAddresses;
import com.fsck.k9.mail.Address;
import com.fsck.k9.mail.Message;
import com.fsck.k9.mail.Message.RecipientType;
import com.fsck.k9.message.AutocryptStatusInteractor;
import com.fsck.k9.message.AutocryptStatusInteractor.RecipientAutocryptStatus;
import com.fsck.k9.message.AutocryptStatusInteractor.RecipientAutocryptStatusType;
import com.fsck.k9.message.ComposePgpEnableByDefaultDecider;
import com.fsck.k9.message.ComposePgpInlineDecider;
import com.fsck.k9.view.RecipientSelectView.Recipient;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openintents.openpgp.IOpenPgpService2;
import org.openintents.openpgp.util.OpenPgpApi;
import org.openintents.openpgp.util.OpenPgpServiceConnection;
import org.openintents.openpgp.util.ShadowOpenPgpAsyncTask;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowApplication;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;


// Robolectric unit tests for RecipientPresenter: recipient initialization from
// reply/reply-all messages, crypto status computation against a mocked OpenPGP
// provider, and listener notification on recipient-token changes. The background
// scheduler is paused in setUp(), so every async step must be driven explicitly
// through runBackgroundTask().
@SuppressWarnings("ConstantConditions")
@RunWith(K9RobolectricTestRunner.class)
@Config(shadows = {ShadowOpenPgpAsyncTask.class})
public class RecipientPresenterTest {
    private static final ReplyToAddresses TO_ADDRESSES = new ReplyToAddresses(Address.parse("to@example.org"));
    private static final List<Address> ALL_TO_ADDRESSES = Arrays.asList(Address.parse("allTo@example.org"));
    private static final List<Address> ALL_CC_ADDRESSES = Arrays.asList(Address.parse("allCc@example.org"));
    private static final String CRYPTO_PROVIDER = "crypto_provider";
    private static final long CRYPTO_KEY_ID = 123L;


    private RecipientPresenter recipientPresenter;
    private ReplyToParser replyToParser;
    private ComposePgpInlineDecider composePgpInlineDecider;
    private ComposePgpEnableByDefaultDecider composePgpEnableByDefaultDecider;
    private Account account;
    private RecipientMvpView recipientMvpView;
    private RecipientPresenter.RecipientsChangedListener listener;
    private AutocryptStatusInteractor autocryptStatusInteractor;
    private RecipientAutocryptStatus noRecipientsAutocryptResult;


    @Before
    public void setUp() throws Exception {
        Context context = ShadowApplication.getInstance().getApplicationContext();

        // Pause the background scheduler so asynchronous presenter work only runs
        // when a test explicitly calls runBackgroundTask().
        Robolectric.getBackgroundThreadScheduler().pause();

        recipientMvpView = mock(RecipientMvpView.class);
        account = mock(Account.class);
        composePgpInlineDecider = mock(ComposePgpInlineDecider.class);
        composePgpEnableByDefaultDecider = mock(ComposePgpEnableByDefaultDecider.class);
        autocryptStatusInteractor = mock(AutocryptStatusInteractor.class);
        replyToParser = mock(ReplyToParser.class);
        LoaderManager loaderManager = mock(LoaderManager.class);
        listener = mock(RecipientPresenter.RecipientsChangedListener.class);

        recipientPresenter = new RecipientPresenter(
                context, loaderManager, recipientMvpView, account, composePgpInlineDecider,
                composePgpEnableByDefaultDecider, autocryptStatusInteractor, replyToParser, listener);
        // The presenter constructor queues one background task; drain it here so
        // individual tests start from a clean scheduler state.
        runBackgroundTask();

        noRecipientsAutocryptResult = new RecipientAutocryptStatus(RecipientAutocryptStatusType.NO_RECIPIENTS, null);
    }

    @Test
    public void testInitFromReplyToMessage() throws Exception {
        Message message = mock(Message.class);
        when(replyToParser.getRecipientsToReplyTo(message, account)).thenReturn(TO_ADDRESSES);

        recipientPresenter.initFromReplyToMessage(message, ReplyMode.NORMAL);
        runBackgroundTask();

        verify(recipientMvpView).addRecipients(eq(RecipientType.TO), any(Recipient[].class));
    }

    @Test
    public void testInitFromReplyToAllMessage() throws Exception {
        Message message = mock(Message.class);
        when(replyToParser.getRecipientsToReplyTo(message, account)).thenReturn(TO_ADDRESSES);
        ReplyToAddresses replyToAddresses = new ReplyToAddresses(ALL_TO_ADDRESSES, ALL_CC_ADDRESSES);
        when(replyToParser.getRecipientsToReplyAllTo(message, account)).thenReturn(replyToAddresses);

        recipientPresenter.initFromReplyToMessage(message, ReplyMode.ALL);
        // one for To, one for Cc
        runBackgroundTask();
        runBackgroundTask();

        // NOTE(review): these verifications use any(Recipient.class) while the
        // single-reply test above uses any(Recipient[].class) — confirm which form
        // actually matches the (presumably varargs) addRecipients signature.
        verify(recipientMvpView).addRecipients(eq(RecipientType.TO), any(Recipient.class));
        verify(recipientMvpView).addRecipients(eq(RecipientType.CC), any(Recipient.class));
    }

    @Test
    public void initFromReplyToMessage_shouldCallComposePgpInlineDecider() throws Exception {
        Message message = mock(Message.class);
        when(replyToParser.getRecipientsToReplyTo(message, account)).thenReturn(TO_ADDRESSES);

        recipientPresenter.initFromReplyToMessage(message, ReplyMode.NORMAL);

        verify(composePgpInlineDecider).shouldReplyInline(message);
    }

    @Test
    public void getCurrentCryptoStatus_withoutCryptoProvider() throws Exception {
        // No provider configured: status must report UNCONFIGURED and an unusable provider state.
        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.UNCONFIGURED, status.getCryptoStatusDisplayType());
        assertEquals(CryptoSpecialModeDisplayType.NONE, status.getCryptoSpecialModeDisplayType());
        assertNull(status.getAttachErrorStateOrNull());
        assertFalse(status.isProviderStateOk());
        assertFalse(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withCryptoProvider() throws Exception {
        setupCryptoProvider(noRecipientsAutocryptResult);

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.NO_CHOICE_EMPTY, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withOpportunistic() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.AVAILABLE_UNCONFIRMED, null);
        setupCryptoProvider(recipientAutocryptStatus);

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.NO_CHOICE_AVAILABLE, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withOpportunistic__confirmed() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.AVAILABLE_CONFIRMED, null);
        setupCryptoProvider(recipientAutocryptStatus);

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.NO_CHOICE_AVAILABLE_TRUSTED, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withOpportunistic__missingKeys() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.UNAVAILABLE, null);
        setupCryptoProvider(recipientAutocryptStatus);

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.NO_CHOICE_UNAVAILABLE, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withOpportunistic__privateMissingKeys() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.UNAVAILABLE, null);
        setupCryptoProvider(recipientAutocryptStatus);

        // Explicitly enabling encryption with unavailable keys must surface an error state.
        recipientPresenter.onCryptoModeChanged(CryptoMode.CHOICE_ENABLED);
        runBackgroundTask();

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.CHOICE_ENABLED_ERROR, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withModeDisabled() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.AVAILABLE_UNCONFIRMED, null);
        setupCryptoProvider(recipientAutocryptStatus);

        recipientPresenter.onCryptoModeChanged(CryptoMode.CHOICE_DISABLED);
        runBackgroundTask();

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.CHOICE_DISABLED_UNTRUSTED, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withModePrivate() throws Exception {
        RecipientAutocryptStatus recipientAutocryptStatus = new RecipientAutocryptStatus(
                RecipientAutocryptStatusType.AVAILABLE_UNCONFIRMED, null);
        setupCryptoProvider(recipientAutocryptStatus);

        recipientPresenter.onCryptoModeChanged(CryptoMode.CHOICE_ENABLED);
        runBackgroundTask();

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.CHOICE_ENABLED_UNTRUSTED, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.shouldUsePgpMessageBuilder());
    }

    @Test
    public void getCurrentCryptoStatus_withModeSignOnly() throws Exception {
        setupCryptoProvider(noRecipientsAutocryptResult);

        recipientPresenter.onMenuSetSignOnly(true);
        runBackgroundTask();

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.SIGN_ONLY, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.isSigningEnabled());
        assertTrue(status.isSignOnly());
    }

    @Test
    public void getCurrentCryptoStatus_withModeInline() throws Exception {
        setupCryptoProvider(noRecipientsAutocryptResult);

        recipientPresenter.onMenuSetPgpInline(true);
        runBackgroundTask();

        ComposeCryptoStatus status = recipientPresenter.getCurrentCachedCryptoStatus();

        assertEquals(CryptoStatusDisplayType.NO_CHOICE_EMPTY, status.getCryptoStatusDisplayType());
        assertTrue(status.isProviderStateOk());
        assertTrue(status.isPgpInlineModeEnabled());
    }

    // Each token-change callback below must forward a change notification to the
    // registered RecipientsChangedListener.
    @Test
    public void onToTokenAdded_notifiesListenerOfRecipientChange() {
        recipientPresenter.onToTokenAdded();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onToTokenChanged_notifiesListenerOfRecipientChange() {
        recipientPresenter.onToTokenChanged();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onToTokenRemoved_notifiesListenerOfRecipientChange() {
        recipientPresenter.onToTokenRemoved();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onCcTokenAdded_notifiesListenerOfRecipientChange() {
        recipientPresenter.onCcTokenAdded();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onCcTokenChanged_notifiesListenerOfRecipientChange() {
        recipientPresenter.onCcTokenChanged();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onCcTokenRemoved_notifiesListenerOfRecipientChange() {
        recipientPresenter.onCcTokenRemoved();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onBccTokenAdded_notifiesListenerOfRecipientChange() {
        recipientPresenter.onBccTokenAdded();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onBccTokenChanged_notifiesListenerOfRecipientChange() {
        recipientPresenter.onBccTokenChanged();

        verify(listener).onRecipientsChanged();
    }

    @Test
    public void onBccTokenRemoved_notifiesListenerOfRecipientChange() {
        recipientPresenter.onBccTokenRemoved();

        verify(listener).onRecipientsChanged();
    }

    // Runs exactly one queued background task; fails the test when nothing was
    // queued, so a missing async step is caught immediately.
    private void runBackgroundTask() {
        boolean taskRun = Robolectric.getBackgroundThreadScheduler().runOneTask();
        assertTrue(taskRun);
    }

    // Wires a fully mocked, "bound and healthy" OpenPGP provider into the presenter
    // and switches to a mocked account that has a crypto key configured.
    // NOTE(review): the local 'account' intentionally(?) shadows the field of the
    // same name set up in setUp() — verify that this separate mock is what
    // onSwitchAccount() should receive.
    private void setupCryptoProvider(RecipientAutocryptStatus autocryptStatusResult) throws android.os.RemoteException {
        Account account = mock(Account.class);
        OpenPgpServiceConnection openPgpServiceConnection = mock(OpenPgpServiceConnection.class);
        IOpenPgpService2 openPgpService2 = mock(IOpenPgpService2.class);
        Intent permissionPingIntent = new Intent();

        when(autocryptStatusInteractor.retrieveCryptoProviderRecipientStatus(
                any(OpenPgpApi.class), any(String[].class))).thenReturn(autocryptStatusResult);

        QMail.setOpenPgpProvider(CRYPTO_PROVIDER);
        // The provider's permission ping is stubbed to succeed.
        permissionPingIntent.putExtra(OpenPgpApi.RESULT_CODE, OpenPgpApi.RESULT_CODE_SUCCESS);
        when(account.getCryptoKey()).thenReturn(CRYPTO_KEY_ID);
        when(openPgpServiceConnection.isBound()).thenReturn(true);
        when(openPgpServiceConnection.getService()).thenReturn(openPgpService2);
        when(openPgpService2.execute(any(Intent.class), any(ParcelFileDescriptor.class), any(Integer.class)))
                .thenReturn(permissionPingIntent);

        recipientPresenter.setOpenPgpServiceConnection(openPgpServiceConnection, CRYPTO_PROVIDER);
        recipientPresenter.onSwitchAccount(account);
        // one for the permission ping, one for the async status update
        runBackgroundTask();
        runBackgroundTask();
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.analytics.movingPercentiles;

import org.HdrHistogram.DoubleHistogram;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.ReduceContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;
import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramFactory;
import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles;
import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles;
import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
import org.elasticsearch.search.aggregations.metrics.TDigestState;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationPath;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Pipeline aggregator that computes moving percentiles over the buckets of a
 * histogram: for each bucket it merges the percentile states of a sliding
 * window of sibling buckets (controlled by {@code window} and {@code shift})
 * and attaches the merged percentiles to the bucket.
 */
public class MovingPercentilesPipelineAggregator extends PipelineAggregator {

    private final int window; // number of buckets merged per output bucket
    private final int shift;  // offset of the window relative to the current bucket

    MovingPercentilesPipelineAggregator(String name, String[] bucketsPaths, int window, int shift,
                                        Map<String, Object> metadata) {
        super(name, bucketsPaths, metadata);
        this.window = window;
        this.shift = shift;
    }

    /**
     * Reduces the histogram by dispatching to the TDigest or HDR implementation,
     * depending on which percentile method the first bucket's target aggregation uses.
     */
    @Override
    public InternalAggregation reduce(InternalAggregation aggregation, ReduceContext reduceContext) {
        // Unchecked but expected cast: a moving_percentiles aggregation is only valid
        // under a multi-bucket histogram, which also acts as the HistogramFactory.
        InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends InternalMultiBucketAggregation.InternalBucket>
                histo = (InternalMultiBucketAggregation<? extends InternalMultiBucketAggregation, ? extends
                InternalMultiBucketAggregation.InternalBucket>) aggregation;
        List<? extends InternalMultiBucketAggregation.InternalBucket> buckets = histo.getBuckets();
        HistogramFactory factory = (HistogramFactory) histo;

        List<Bucket> newBuckets = new ArrayList<>(buckets.size());
        if (buckets.size() == 0) {
            return factory.createAggregation(newBuckets);
        }

        // The percentile method/keys/format are resolved once from the first bucket
        // and assumed uniform across all buckets.
        PercentileConfig config = resolvePercentileConfig(histo, buckets.get(0), bucketsPaths()[0]);
        switch (config.method) {
            case TDIGEST:
                reduceTDigest(buckets, histo, newBuckets, factory, config);
                break;
            case HDR:
                reduceHDR(buckets, histo, newBuckets, factory, config);
                break;
            default:
                throw new AggregationExecutionException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName()
                        + " references an unknown percentile aggregation method: [" + config.method + "]");
        }
        return factory.createAggregation(newBuckets);
    }

    /** TDigest variant: merges the window's TDigest states into a fresh state per bucket. */
    private void reduceTDigest(List<? extends InternalMultiBucketAggregation.InternalBucket> buckets,
                               MultiBucketsAggregation histo,
                               List<Bucket> newBuckets,
                               HistogramFactory factory,
                               PercentileConfig config) {

        List<TDigestState> values = buckets.stream()
                .map(b -> resolveTDigestBucketValue(histo, b, bucketsPaths()[0]))
                .filter(v -> v != null)
                .collect(Collectors.toList());

        int index = 0;
        for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {

            // Default is to reuse existing bucket.  Simplifies the rest of the logic,
            // since we only change newBucket if we can add to it
            MultiBucketsAggregation.Bucket newBucket = bucket;

            TDigestState state = null;
            // Window bounds are clamped so leading/trailing buckets get partial windows.
            int fromIndex = clamp(index - window + shift, values.size());
            int toIndex = clamp(index + shift, values.size());
            for (int i = fromIndex; i < toIndex; i++) {
                TDigestState bucketState = values.get(i);
                if (bucketState != null) {
                    if (state == null) {
                        // We have to create a new TDigest histogram because otherwise it will alter the
                        // existing histogram and bucket value
                        state = new TDigestState(bucketState.compression());
                    }
                    state.add(bucketState);
                }
            }
            if (state != null) {
                List<InternalAggregation> aggs = bucket.getAggregations().asList().stream()
                        .map((p) -> (InternalAggregation) p)
                        .collect(Collectors.toList());
                aggs.add(new InternalTDigestPercentiles(name(), config.keys, state, config.keyed,
                        config.formatter, metadata()));
                newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(),
                        new InternalAggregations(aggs));
            }
            newBuckets.add(newBucket);
            index++;
        }
    }

    /** HDR variant: merges the window's HDR histograms into a fresh histogram per bucket. */
    private void reduceHDR(List<? extends InternalMultiBucketAggregation.InternalBucket> buckets,
                           MultiBucketsAggregation histo,
                           List<Bucket> newBuckets,
                           HistogramFactory factory,
                           PercentileConfig config) {

        List<DoubleHistogram> values = buckets.stream()
                .map(b -> resolveHDRBucketValue(histo, b, bucketsPaths()[0]))
                .filter(v -> v != null)
                .collect(Collectors.toList());

        int index = 0;
        for (InternalMultiBucketAggregation.InternalBucket bucket : buckets) {
            DoubleHistogram state = null;

            // Default is to reuse existing bucket.  Simplifies the rest of the logic,
            // since we only change newBucket if we can add to it
            MultiBucketsAggregation.Bucket newBucket = bucket;

            int fromIndex = clamp(index - window + shift, values.size());
            int toIndex = clamp(index + shift, values.size());
            for (int i = fromIndex; i < toIndex; i++) {
                DoubleHistogram bucketState = values.get(i);
                if (bucketState != null) {
                    if (state == null) {
                        // We have to create a new HDR histogram because otherwise it will alter the
                        // existing histogram and bucket value
                        state = new DoubleHistogram(bucketState.getNumberOfSignificantValueDigits());
                    }
                    state.add(bucketState);
                }
            }
            if (state != null) {
                List<InternalAggregation> aggs = bucket.getAggregations().asList().stream()
                        .map((p) -> (InternalAggregation) p)
                        .collect(Collectors.toList());
                aggs.add(new InternalHDRPercentiles(name(), config.keys, state, config.keyed,
                        config.formatter, metadata()));
                newBucket = factory.createBucket(factory.getKey(bucket), bucket.getDocCount(),
                        new InternalAggregations(aggs));
            }
            newBuckets.add(newBucket);
            index++;
        }
    }

    /**
     * Resolves the percentile configuration (method, keys, keyed flag, formatter)
     * from the aggregation referenced by {@code aggPath} in the given bucket.
     */
    private PercentileConfig resolvePercentileConfig(MultiBucketsAggregation agg,
                                                     InternalMultiBucketAggregation.InternalBucket bucket,
                                                     String aggPath) {
        List<String> aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList();
        Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList);
        if (propertyValue == null) {
            throw buildResolveError(agg, aggPathsList, propertyValue, "percentiles");
        }

        if (propertyValue instanceof InternalTDigestPercentiles) {
            InternalTDigestPercentiles internalTDigestPercentiles = ((InternalTDigestPercentiles) propertyValue);
            return new PercentileConfig(PercentilesMethod.TDIGEST,
                                        internalTDigestPercentiles.getKeys(),
                                        internalTDigestPercentiles.keyed(),
                                        internalTDigestPercentiles.formatter());
        }
        if (propertyValue instanceof InternalHDRPercentiles) {
            InternalHDRPercentiles internalHDRPercentiles = ((InternalHDRPercentiles) propertyValue);
            return new PercentileConfig(PercentilesMethod.HDR,
                                        internalHDRPercentiles.getKeys(),
                                        internalHDRPercentiles.keyed(),
                                        internalHDRPercentiles.formatter());
        }
        throw buildResolveError(agg, aggPathsList, propertyValue, "percentiles");
    }

    /** Extracts the TDigest state for one bucket, or fails when the path is not a TDigest percentiles agg. */
    private TDigestState resolveTDigestBucketValue(MultiBucketsAggregation agg,
                                                   InternalMultiBucketAggregation.InternalBucket bucket,
                                                   String aggPath) {
        List<String> aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList();
        Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList);
        if (propertyValue == null || (propertyValue instanceof InternalTDigestPercentiles) == false) {
            throw buildResolveError(agg, aggPathsList, propertyValue, "TDigest");
        }
        return ((InternalTDigestPercentiles) propertyValue).getState();
    }

    /** Extracts the HDR histogram for one bucket, or fails when the path is not an HDR percentiles agg. */
    private DoubleHistogram resolveHDRBucketValue(MultiBucketsAggregation agg,
                                                  InternalMultiBucketAggregation.InternalBucket bucket,
                                                  String aggPath) {
        List<String> aggPathsList = AggregationPath.parse(aggPath).getPathElementsAsStringList();
        Object propertyValue = bucket.getProperty(agg.getName(), aggPathsList);
        if (propertyValue == null || (propertyValue instanceof InternalHDRPercentiles) == false) {
            throw buildResolveError(agg, aggPathsList, propertyValue, "HDR");
        }
        return ((InternalHDRPercentiles) propertyValue).getState();
    }

    // Builds the user-facing error for an unresolvable/mistyped buckets_path target.
    // NOTE(review): message wording differs between the two branches
    // ("percentile aggregation" vs "percentiles aggregation") — confirm intended.
    private IllegalArgumentException buildResolveError(MultiBucketsAggregation agg, List<String> aggPathsList,
                                                       Object propertyValue, String method) {
        if (propertyValue == null) {
            return new IllegalArgumentException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName()
                    + " must reference a " + method + " percentile aggregation");
        } else {
            String currentAggName;
            if (aggPathsList.isEmpty()) {
                currentAggName = agg.getName();
            } else {
                currentAggName = aggPathsList.get(0);
            }
            return new IllegalArgumentException(AbstractPipelineAggregationBuilder.BUCKETS_PATH_FIELD.getPreferredName()
                    + " must reference a " + method + " percentiles aggregation, got: ["
                    + propertyValue.getClass().getSimpleName() + "] at aggregation [" + currentAggName + "]");
        }
    }

    // Clamps a window index into [0, length] so partial windows at the edges are valid.
    private int clamp(int index, int length) {
        if (index < 0) {
            return 0;
        }
        if (index > length) {
            return length;
        }
        return index;
    }

    // TODO: replace this with the PercentilesConfig that's used by the percentiles builder.
    // The config isn't available through the Internal objects
    /** helper class to collect the percentile's configuration */
    private static class PercentileConfig {
        final double[] keys;
        final boolean keyed;
        final PercentilesMethod method;
        final DocValueFormat formatter;

        PercentileConfig(PercentilesMethod method, double[] keys, boolean keyed, DocValueFormat formatter) {
            this.method = method;
            this.keys = keys;
            this.keyed = keyed;
            this.formatter = formatter;
        }
    }
}
/**
 * Copyright 2011, Big Switch Networks, Inc.
 * Originally created by David Erickson, Stanford University
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 **/

package net.floodlightcontroller.storage;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.debugcounter.IDebugCounter;
import net.floodlightcontroller.debugcounter.IDebugCounterService;
import net.floodlightcontroller.debugcounter.IDebugCounterService.MetaData;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.storage.web.StorageWebRoutable;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Base implementation of a storage source: bookkeeping for table names,
 * per-table debug counters, change listeners, and asynchronous wrappers around
 * the abstract query/insert/update/delete primitives that subclasses provide.
 */
public abstract class AbstractStorageSource
    implements IStorageSourceService, IFloodlightModule {
    protected static Logger logger = LoggerFactory.getLogger(AbstractStorageSource.class);

    // Shared instance of the executor to use to execute the storage tasks.
    // We make this a single threaded executor, because if we used a thread pool
    // then storage operations could be executed out of order which would cause
    // problems in some cases (e.g. delete and update of a row getting reordered).
    // If we wanted to make this more multi-threaded we could have multiple
    // worker threads/executors with affinity of operations on a given table
    // to a single worker thread. But for now, we'll keep it simple and just have
    // a single thread for all operations.
    protected static ExecutorService defaultExecutorService = Executors.newSingleThreadExecutor();

    protected final static String STORAGE_QUERY_COUNTER_NAME = "StorageQuery";
    protected final static String STORAGE_UPDATE_COUNTER_NAME = "StorageUpdate";
    protected final static String STORAGE_DELETE_COUNTER_NAME = "StorageDelete";

    protected Set<String> allTableNames = new CopyOnWriteArraySet<String>();
    protected ExecutorService executorService = defaultExecutorService;
    protected IStorageExceptionHandler exceptionHandler;

    protected IDebugCounterService debugCounterService;
    // Lazily-registered counters, keyed by "<table>__<opType>" and by bare opType.
    private Map<String, IDebugCounter> debugCounters = new HashMap<String, IDebugCounter>();

    // Per-table change listeners; the value sets are CopyOnWriteArraySets so
    // notification can iterate without extra locking.
    private Map<String, Set<IStorageSourceListener>> listeners =
        new ConcurrentHashMap<String, Set<IStorageSourceListener>>();

    // Our dependencies
    protected IRestApiService restApi = null;

    protected static final String DB_ERROR_EXPLANATION =
            "An unknown error occurred while executing asynchronous " +
            "database operation";

    // Callable wrapper that logs and forwards StorageExceptions from async work,
    // then rethrows so the Future still reports the failure.
    abstract class StorageCallable<V> implements Callable<V> {
        public V call() {
            try {
                return doStorageOperation();
            }
            catch (StorageException e) {
                logger.error("Failure in asynchronous call to executeQuery", e);
                if (exceptionHandler != null)
                    exceptionHandler.handleException(e);
                throw e;
            }
        }
        abstract protected V doStorageOperation();
    }

    // Runnable counterpart of StorageCallable for void async operations.
    abstract class StorageRunnable implements Runnable {
        public void run() {
            try {
                doStorageOperation();
            }
            catch (StorageException e) {
                logger.error("Failure in asynchronous call to updateRows", e);
                if (exceptionHandler != null)
                    exceptionHandler.handleException(e);
                throw e;
            }
        }
        abstract void doStorageOperation();
    }

    public AbstractStorageSource() {
        this.executorService = defaultExecutorService;
    }

    // Null resets back to the shared single-threaded default executor.
    public void setExecutorService(ExecutorService executorService) {
        this.executorService = (executorService != null) ?
                executorService : defaultExecutorService;
    }

    @Override
    public void setExceptionHandler(IStorageExceptionHandler exceptionHandler) {
        this.exceptionHandler = exceptionHandler;
    }

    @Override
    public abstract void setTablePrimaryKeyName(String tableName, String primaryKeyName);

    @Override
    public void createTable(String tableName, Set<String> indexedColumns) {
        // Base class only tracks the table name; actual table creation is the
        // subclass's responsibility.
        allTableNames.add(tableName);
    }

    @Override
    public Set<String> getAllTableNames() {
        return allTableNames;
    }

    public void setDebugCounterService(IDebugCounterService dcs) {
        debugCounterService = dcs;
    }

    // Increments both the per-table counter ("<table>__<opType>") and the
    // aggregate counter for the operation type, registering each on first use.
    protected void updateCounters(String tableOpType, String tableName) {
        String counterName = tableName + "__" + tableOpType;
        IDebugCounter counter = debugCounters.get(counterName);
        if (counter == null) {
            counter = debugCounterService.registerCounter(this.getClass().getCanonicalName(),
                                                          counterName, counterName, MetaData.WARN);
            debugCounters.put(counterName, counter);
            // maintain a list of the counters as the tables register with the storage source service
        }
        counter.increment();
        /*
         * Now, do the counter for the base only (general update, add, or delete operation)
         */
        counter = debugCounters.get(tableOpType);
        if (counter == null) {
            counter = debugCounterService.registerCounter(this.getClass().getCanonicalName(),
                                                          tableOpType, tableOpType, MetaData.WARN);
            debugCounters.put(tableOpType, counter);
        }
        counter.increment();
    }

    @Override
    public abstract IQuery createQuery(String tableName, String[] columnNames,
            IPredicate predicate, RowOrdering ordering);

    @Override
    public IResultSet executeQuery(IQuery query) {
        updateCounters(STORAGE_QUERY_COUNTER_NAME, query.getTableName());
        return executeQueryImpl(query);
    }

    protected abstract IResultSet executeQueryImpl(IQuery query);

    @Override
    public IResultSet executeQuery(String tableName, String[] columnNames,
            IPredicate predicate, RowOrdering ordering) {
        IQuery query = createQuery(tableName, columnNames, predicate, ordering);
        IResultSet resultSet = executeQuery(query);
        return resultSet;
    }

    // Convenience overload that maps each result row through rowMapper.
    @Override
    public Object[] executeQuery(String tableName, String[] columnNames,
            IPredicate predicate, RowOrdering ordering, IRowMapper rowMapper) {
        List<Object> objectList = new ArrayList<Object>();
        IResultSet resultSet = executeQuery(tableName, columnNames, predicate, ordering);
        while (resultSet.next()) {
            Object object = rowMapper.mapRow(resultSet);
            objectList.add(object);
        }
        return objectList.toArray();
    }

    // ---- Asynchronous wrappers: each submits the synchronous counterpart to
    // ---- the (single-threaded) executor so operation order is preserved.

    @Override
    public Future<IResultSet> executeQueryAsync(final IQuery query) {
        Future<IResultSet> future = executorService.submit(
            new StorageCallable<IResultSet>() {
                public IResultSet doStorageOperation() {
                    return executeQuery(query);
                }
            });
        return future;
    }

    @Override
    public Future<IResultSet> executeQueryAsync(final String tableName,
            final String[] columnNames,  final IPredicate predicate,
            final RowOrdering ordering) {
        Future<IResultSet> future = executorService.submit(
            new StorageCallable<IResultSet>() {
                public IResultSet doStorageOperation() {
                    return executeQuery(tableName, columnNames,
                            predicate, ordering);
                }
            });
        return future;
    }

    @Override
    public Future<Object[]> executeQueryAsync(final String tableName,
            final String[] columnNames,  final IPredicate predicate,
            final RowOrdering ordering, final IRowMapper rowMapper) {
        Future<Object[]> future = executorService.submit(
            new StorageCallable<Object[]>() {
                public Object[] doStorageOperation() {
                    return executeQuery(tableName, columnNames,
                            predicate, ordering, rowMapper);
                }
            });
        return future;
    }

    @Override
    public Future<?> insertRowAsync(final String tableName,
            final Map<String,Object> values) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    insertRow(tableName, values);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> updateRowsAsync(final String tableName, final List<Map<String,Object>> rows) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    updateRows(tableName, rows);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> updateMatchingRowsAsync(final String tableName,
            final IPredicate predicate, final Map<String,Object> values) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    updateMatchingRows(tableName, predicate, values);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> updateRowAsync(final String tableName,
            final Object rowKey, final Map<String,Object> values) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    updateRow(tableName, rowKey, values);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> updateRowAsync(final String tableName,
            final Map<String,Object> values) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    updateRow(tableName, values);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> deleteRowAsync(final String tableName, final Object rowKey) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    deleteRow(tableName, rowKey);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> deleteRowsAsync(final String tableName, final Set<Object> rowKeys) {
        Future<?> future = executorService.submit(
                new StorageRunnable() {
                    public void doStorageOperation() {
                        deleteRows(tableName, rowKeys);
                    }
                }, null);
        return future;
    }

    @Override
    public Future<?> deleteMatchingRowsAsync(final String tableName, final IPredicate predicate) {
        Future<?> future = executorService.submit(
                new StorageRunnable() {
                    public void doStorageOperation() {
                        deleteMatchingRows(tableName, predicate);
                    }
                }, null);
        return future;
    }

    @Override
    public Future<?> getRowAsync(final String tableName, final Object rowKey) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    // NOTE(review): the fetched row is discarded; this Future only
                    // signals completion, it does not carry the result.
                    getRow(tableName, rowKey);
                }
            }, null);
        return future;
    }

    @Override
    public Future<?> saveAsync(final IResultSet resultSet) {
        Future<?> future = executorService.submit(
            new StorageRunnable() {
                public void doStorageOperation() {
                    resultSet.save();
                }
            }, null);
        return future;
    }

    // ---- Synchronous operations: count, then delegate to the subclass *Impl.

    @Override
    public void insertRow(String tableName, Map<String, Object> values) {
        updateCounters(STORAGE_UPDATE_COUNTER_NAME, tableName);
        insertRowImpl(tableName, values);
    }

    protected abstract void insertRowImpl(String tableName, Map<String, Object> values);

    @Override
    public void updateRows(String tableName, List<Map<String,Object>> rows) {
        updateCounters(STORAGE_UPDATE_COUNTER_NAME, tableName);
        updateRowsImpl(tableName, rows);
    }

    protected abstract void updateRowsImpl(String tableName, List<Map<String,Object>> rows);

    @Override
    public void updateMatchingRows(String tableName, IPredicate predicate,
            Map<String, Object> values) {
        updateCounters(STORAGE_UPDATE_COUNTER_NAME, tableName);
        updateMatchingRowsImpl(tableName, predicate, values);
    }

    protected abstract void updateMatchingRowsImpl(String tableName, IPredicate predicate,
            Map<String, Object> values);

    @Override
    public void updateRow(String tableName, Object rowKey,
            Map<String, Object> values) {
        updateCounters(STORAGE_UPDATE_COUNTER_NAME, tableName);
        updateRowImpl(tableName, rowKey, values);
    }

    protected abstract void updateRowImpl(String tableName, Object rowKey,
            Map<String, Object> values);

    @Override
    public void updateRow(String tableName, Map<String, Object> values) {
        updateCounters(STORAGE_UPDATE_COUNTER_NAME, tableName);
        updateRowImpl(tableName, values);
    }

    protected abstract void updateRowImpl(String tableName, Map<String, Object> values);

    @Override
    public void deleteRow(String tableName, Object rowKey) {
        updateCounters(STORAGE_DELETE_COUNTER_NAME, tableName);
        deleteRowImpl(tableName, rowKey);
    }

    protected abstract void deleteRowImpl(String tableName, Object rowKey);

    @Override
    public void deleteRows(String tableName, Set<Object> rowKeys) {
        updateCounters(STORAGE_DELETE_COUNTER_NAME, tableName);
        deleteRowsImpl(tableName, rowKeys);
    }

    protected abstract void deleteRowsImpl(String tableName, Set<Object> rowKeys);

    // Generic implementation: query matching rows, delete each in the result
    // set, then save. Subclasses may rely on this rather than a bulk primitive.
    @Override
    public void deleteMatchingRows(String tableName, IPredicate predicate) {
        IResultSet resultSet = null;
        try {
            resultSet = executeQuery(tableName, null, predicate, null);
            while (resultSet.next()) {
                resultSet.deleteRow();
            }
            resultSet.save();
        }
        finally {
            if (resultSet != null)
                resultSet.close();
        }
    }

    @Override
    public IResultSet getRow(String tableName, Object rowKey) {
        updateCounters(STORAGE_QUERY_COUNTER_NAME, tableName);
        return getRowImpl(tableName, rowKey);
    }

    protected abstract IResultSet getRowImpl(String tableName, Object rowKey);

    // synchronized guards the check-then-put on the listeners map; the value set
    // itself is a CopyOnWriteArraySet, safe for concurrent iteration.
    @Override
    public synchronized void addListener(String tableName, IStorageSourceListener listener) {
        Set<IStorageSourceListener> tableListeners = listeners.get(tableName);
        if (tableListeners == null) {
            tableListeners = new CopyOnWriteArraySet<IStorageSourceListener>();
            listeners.put(tableName, tableListeners);
        }
        tableListeners.add(listener);
    }

    @Override
    public synchronized void removeListener(String tableName, IStorageSourceListener listener) {
        Set<IStorageSourceListener> tableListeners = listeners.get(tableName);
        if (tableListeners != null) {
            tableListeners.remove(listener);
        }
    }

    // Dispatches a storage notification to every listener registered for its table.
    // NOTE(review): trace message contains a typo ("listeneres"); left untouched
    // because log strings are runtime behavior.
    protected synchronized void notifyListeners(StorageSourceNotification notification) {
        if (logger.isTraceEnabled()) {
            logger.trace("Notifying storage listeneres: {}", notification);
        }

        String tableName = notification.getTableName();
        Set<Object> keys = notification.getKeys();
        Set<IStorageSourceListener> tableListeners = listeners.get(tableName);
        if (tableListeners != null) {
            for (IStorageSourceListener listener : tableListeners) {
                try {
                    switch (notification.getAction()) {
                        case MODIFY:
                        // NOTE(review): chunk truncated here — the remainder of this
                        // switch and the rest of the class are outside the visible region.
listener.rowsModified(tableName, keys); break; case DELETE: listener.rowsDeleted(tableName, keys); break; } } catch (Exception e) { logger.error("Exception caught handling storage notification", e); } } } } @Override public void notifyListeners(List<StorageSourceNotification> notifications) { for (StorageSourceNotification notification : notifications) notifyListeners(notification); } // IFloodlightModule @Override public Collection<Class<? extends IFloodlightService>> getModuleServices() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IStorageSourceService.class); return l; } @Override public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() { Map<Class<? extends IFloodlightService>, IFloodlightService> m = new HashMap<Class<? extends IFloodlightService>, IFloodlightService>(); m.put(IStorageSourceService.class, this); return m; } @Override public Collection<Class<? extends IFloodlightService>> getModuleDependencies() { Collection<Class<? extends IFloodlightService>> l = new ArrayList<Class<? extends IFloodlightService>>(); l.add(IRestApiService.class); l.add(IDebugCounterService.class); return l; } @Override public void init(FloodlightModuleContext context) throws FloodlightModuleException { restApi = context.getServiceImpl(IRestApiService.class); debugCounterService = context.getServiceImpl(IDebugCounterService.class); } @Override public void startUp(FloodlightModuleContext context) { restApi.addRestletRoutable(new StorageWebRoutable()); debugCounterService.registerModule(this.getClass().getCanonicalName()); } }
package org.keycloak.models.jpa.entities;

import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.MapKeyColumn;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * JPA entity for a client application registered in a realm, mapped to the
 * CLIENT table. A clientId is unique within its owning realm (enforced by the
 * table-level unique constraint on REALM_ID + CLIENT_ID); the primary key is a
 * 36-character string id.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
@Entity
@Table(name="CLIENT", uniqueConstraints = {@UniqueConstraint(columnNames = {"REALM_ID", "CLIENT_ID"})})
public class ClientEntity {

    // Primary key; 36 chars (UUID-sized string — TODO confirm generator).
    @Id
    @Column(name="ID", length = 36)
    private String id;

    // Human-readable display name.
    @Column(name = "NAME")
    private String name;

    // Client identifier, unique within the owning realm.
    @Column(name = "CLIENT_ID")
    private String clientId;

    @Column(name="ENABLED")
    private boolean enabled;

    // Client secret credential.
    @Column(name="SECRET")
    private String secret;

    // Revocation cutoff; tokens issued before this are rejected.
    // NOTE(review): stored as int — presumably epoch seconds; verify against callers.
    @Column(name="NOT_BEFORE")
    private int notBefore;

    @Column(name="PUBLIC_CLIENT")
    private boolean publicClient;

    // Login protocol identifier (e.g. an OIDC/SAML protocol name — confirm).
    @Column(name="PROTOCOL")
    private String protocol;

    @Column(name="FRONTCHANNEL_LOGOUT")
    private boolean frontchannelLogout;

    @Column(name="FULL_SCOPE_ALLOWED")
    private boolean fullScopeAllowed;

    // Owning realm; lazily fetched.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "REALM_ID")
    protected RealmEntity realm;

    // Allowed CORS web origins, one row per value in WEB_ORIGINS.
    @ElementCollection
    @Column(name="VALUE")
    @CollectionTable(name = "WEB_ORIGINS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
    protected Set<String> webOrigins = new HashSet<String>();

    // Registered redirect URIs, one row per value in REDIRECT_URIS.
    @ElementCollection
    @Column(name="VALUE")
    @CollectionTable(name = "REDIRECT_URIS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
    protected Set<String> redirectUris = new HashSet<String>();

    // Free-form name/value attributes (values up to 2048 chars).
    @ElementCollection
    @MapKeyColumn(name="NAME")
    @Column(name="VALUE", length = 2048)
    @CollectionTable(name="CLIENT_ATTRIBUTES", joinColumns={ @JoinColumn(name="CLIENT_ID") })
    protected Map<String, String> attributes = new HashMap<String, String>();

    // Identity-provider mappings owned by this client; removed with the client.
    @OneToMany(fetch = FetchType.LAZY, mappedBy = "client", cascade = CascadeType.REMOVE)
    Collection<ClientIdentityProviderMappingEntity> identityProviders = new ArrayList<ClientIdentityProviderMappingEntity>();

    // Protocol mappers owned by this client; orphans are deleted.
    @OneToMany(cascade ={CascadeType.REMOVE}, orphanRemoval = true, mappedBy = "client")
    Collection<ProtocolMapperEntity> protocolMappers = new ArrayList<ProtocolMapperEntity>();

    @Column(name="SURROGATE_AUTH_REQUIRED")
    private boolean surrogateAuthRequired;

    @Column(name="BASE_URL")
    private String baseUrl;

    @Column(name="MANAGEMENT_URL")
    private String managementUrl;

    @Column(name="DIRECT_GRANTS_ONLY")
    protected boolean directGrantsOnly;

    @Column(name="BEARER_ONLY")
    private boolean bearerOnly;

    @Column(name="CONSENT_REQUIRED")
    private boolean consentRequired;

    // Cluster-node re-registration timeout; units not evident here — confirm.
    @Column(name="NODE_REREG_TIMEOUT")
    private int nodeReRegistrationTimeout;

    // Client-scoped roles; eagerly fetched and removed with the client.
    @OneToMany(fetch = FetchType.EAGER, cascade ={CascadeType.REMOVE}, orphanRemoval = true, mappedBy = "client")
    Collection<RoleEntity> roles = new ArrayList<RoleEntity>();

    // Roles granted by default, via the CLIENT_DEFAULT_ROLES join table.
    @OneToMany(fetch = FetchType.LAZY, cascade ={CascadeType.REMOVE}, orphanRemoval = true)
    @JoinTable(name="CLIENT_DEFAULT_ROLES", joinColumns = { @JoinColumn(name="CLIENT_ID")}, inverseJoinColumns = { @JoinColumn(name="ROLE_ID")})
    Collection<RoleEntity> defaultRoles = new ArrayList<RoleEntity>();

    // Registered cluster nodes: node name -> integer value (presumably a
    // registration timestamp; verify against callers).
    @ElementCollection
    @MapKeyColumn(name="NAME")
    @Column(name="VALUE")
    @CollectionTable(name="CLIENT_NODE_REGISTRATIONS", joinColumns={ @JoinColumn(name="CLIENT_ID") })
    Map<String, Integer> registeredNodes = new HashMap<String, Integer>();

    // --- Plain accessors; no logic beyond field access. Collection getters
    // --- expose the internal (mutable, JPA-managed) collections directly.

    public RealmEntity getRealm() {
        return realm;
    }

    public void setRealm(RealmEntity realm) {
        this.realm = realm;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public Set<String> getWebOrigins() {
        return webOrigins;
    }

    public void setWebOrigins(Set<String> webOrigins) {
        this.webOrigins = webOrigins;
    }

    public Set<String> getRedirectUris() {
        return redirectUris;
    }

    public void setRedirectUris(Set<String> redirectUris) {
        this.redirectUris = redirectUris;
    }

    public String getSecret() {
        return secret;
    }

    public void setSecret(String secret) {
        this.secret = secret;
    }

    public int getNotBefore() {
        return notBefore;
    }

    public void setNotBefore(int notBefore) {
        this.notBefore = notBefore;
    }

    public boolean isPublicClient() {
        return publicClient;
    }

    public void setPublicClient(boolean publicClient) {
        this.publicClient = publicClient;
    }

    public boolean isFullScopeAllowed() {
        return fullScopeAllowed;
    }

    public void setFullScopeAllowed(boolean fullScopeAllowed) {
        this.fullScopeAllowed = fullScopeAllowed;
    }

    public Map<String, String> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, String> attributes) {
        this.attributes = attributes;
    }

    public String getProtocol() {
        return protocol;
    }

    public void setProtocol(String protocol) {
        this.protocol = protocol;
    }

    public boolean isFrontchannelLogout() {
        return frontchannelLogout;
    }

    public void setFrontchannelLogout(boolean frontchannelLogout) {
        this.frontchannelLogout = frontchannelLogout;
    }

    public Collection<ClientIdentityProviderMappingEntity> getIdentityProviders() {
        return this.identityProviders;
    }

    public void setIdentityProviders(Collection<ClientIdentityProviderMappingEntity> identityProviders) {
        this.identityProviders = identityProviders;
    }

    public Collection<ProtocolMapperEntity> getProtocolMappers() {
        return protocolMappers;
    }

    public void setProtocolMappers(Collection<ProtocolMapperEntity> protocolMappers) {
        this.protocolMappers = protocolMappers;
    }

    public boolean isSurrogateAuthRequired() {
        return surrogateAuthRequired;
    }

    public void setSurrogateAuthRequired(boolean surrogateAuthRequired) {
        this.surrogateAuthRequired = surrogateAuthRequired;
    }

    public String getBaseUrl() {
        return baseUrl;
    }

    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }

    public String getManagementUrl() {
        return managementUrl;
    }

    public void setManagementUrl(String managementUrl) {
        this.managementUrl = managementUrl;
    }

    public Collection<RoleEntity> getRoles() {
        return roles;
    }

    public void setRoles(Collection<RoleEntity> roles) {
        this.roles = roles;
    }

    public Collection<RoleEntity> getDefaultRoles() {
        return defaultRoles;
    }

    public void setDefaultRoles(Collection<RoleEntity> defaultRoles) {
        this.defaultRoles = defaultRoles;
    }

    public boolean isBearerOnly() {
        return bearerOnly;
    }

    public void setBearerOnly(boolean bearerOnly) {
        this.bearerOnly = bearerOnly;
    }

    public boolean isConsentRequired() {
        return consentRequired;
    }

    public void setConsentRequired(boolean consentRequired) {
        this.consentRequired = consentRequired;
    }

    public boolean isDirectGrantsOnly() {
        return directGrantsOnly;
    }

    public void setDirectGrantsOnly(boolean directGrantsOnly) {
        this.directGrantsOnly = directGrantsOnly;
    }

    public int getNodeReRegistrationTimeout() {
        return nodeReRegistrationTimeout;
    }

    public void setNodeReRegistrationTimeout(int nodeReRegistrationTimeout) {
        this.nodeReRegistrationTimeout = nodeReRegistrationTimeout;
    }

    public Map<String, Integer> getRegisteredNodes() {
        return registeredNodes;
    }

    public void setRegisteredNodes(Map<String, Integer> registeredNodes) {
        this.registeredNodes = registeredNodes;
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.kms.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kms-2014-11-01/GenerateDataKeyPair" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class GenerateDataKeyPairRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * Specifies the encryption context that will be used when encrypting the private key in the data key pair. * </p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an exact * case-sensitive match) encryption context to decrypt the data. An encryption context is optional when encrypting * with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption Context</a> * in the <i>Key Management Service Developer Guide</i>. * </p> */ private com.amazonaws.internal.SdkInternalMap<String, String> encryptionContext; /** * <p> * Specifies the symmetric KMS key that encrypts the private key in the data key pair. 
You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation. * </p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix it with * <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must use the key ARN * or alias ARN. * </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias name and * alias ARN, use <a>ListAliases</a>. * </p> */ private String keyId; /** * <p> * Determines the type of data key pair that is generated. * </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and verify (but * not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not effective on data * key pairs, which are used outside of KMS. * </p> */ private String keyPairSpec; /** * <p> * A list of grant tokens. * </p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet achieved * <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using a grant * token</a> in the <i>Key Management Service Developer Guide</i>. 
* </p> */ private com.amazonaws.internal.SdkInternalList<String> grantTokens; /** * <p> * Specifies the encryption context that will be used when encrypting the private key in the data key pair. * </p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an exact * case-sensitive match) encryption context to decrypt the data. An encryption context is optional when encrypting * with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption Context</a> * in the <i>Key Management Service Developer Guide</i>. * </p> * * @return Specifies the encryption context that will be used when encrypting the private key in the data key * pair.</p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an * exact case-sensitive match) encryption context to decrypt the data. An encryption context is optional * when encrypting with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption * Context</a> in the <i>Key Management Service Developer Guide</i>. */ public java.util.Map<String, String> getEncryptionContext() { if (encryptionContext == null) { encryptionContext = new com.amazonaws.internal.SdkInternalMap<String, String>(); } return encryptionContext; } /** * <p> * Specifies the encryption context that will be used when encrypting the private key in the data key pair. 
* </p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an exact * case-sensitive match) encryption context to decrypt the data. An encryption context is optional when encrypting * with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption Context</a> * in the <i>Key Management Service Developer Guide</i>. * </p> * * @param encryptionContext * Specifies the encryption context that will be used when encrypting the private key in the data key * pair.</p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an * exact case-sensitive match) encryption context to decrypt the data. An encryption context is optional when * encrypting with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption * Context</a> in the <i>Key Management Service Developer Guide</i>. */ public void setEncryptionContext(java.util.Map<String, String> encryptionContext) { this.encryptionContext = encryptionContext == null ? null : new com.amazonaws.internal.SdkInternalMap<String, String>(encryptionContext); } /** * <p> * Specifies the encryption context that will be used when encrypting the private key in the data key pair. * </p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. 
When you use an encryption context to encrypt data, you must specify the same (an exact * case-sensitive match) encryption context to decrypt the data. An encryption context is optional when encrypting * with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption Context</a> * in the <i>Key Management Service Developer Guide</i>. * </p> * * @param encryptionContext * Specifies the encryption context that will be used when encrypting the private key in the data key * pair.</p> * <p> * An <i>encryption context</i> is a collection of non-secret key-value pairs that represents additional * authenticated data. When you use an encryption context to encrypt data, you must specify the same (an * exact case-sensitive match) encryption context to decrypt the data. An encryption context is optional when * encrypting with a symmetric KMS key, but it is highly recommended. * </p> * <p> * For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#encrypt_context">Encryption * Context</a> in the <i>Key Management Service Developer Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public GenerateDataKeyPairRequest withEncryptionContext(java.util.Map<String, String> encryptionContext) { setEncryptionContext(encryptionContext); return this; } /** * Add a single EncryptionContext entry * * @see GenerateDataKeyPairRequest#withEncryptionContext * @returns a reference to this object so that method calls can be chained together. 
*/ public GenerateDataKeyPairRequest addEncryptionContextEntry(String key, String value) { if (null == this.encryptionContext) { this.encryptionContext = new com.amazonaws.internal.SdkInternalMap<String, String>(); } if (this.encryptionContext.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.encryptionContext.put(key, value); return this; } /** * Removes all the entries added into EncryptionContext. * * @return Returns a reference to this object so that method calls can be chained together. */ public GenerateDataKeyPairRequest clearEncryptionContextEntries() { this.encryptionContext = null; return this; } /** * <p> * Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation. * </p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix it with * <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must use the key ARN * or alias ARN. * </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias name and * alias ARN, use <a>ListAliases</a>. * </p> * * @param keyId * Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. 
To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation.</p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix * it with <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must * use the key ARN or alias ARN. * </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias * name and alias ARN, use <a>ListAliases</a>. */ public void setKeyId(String keyId) { this.keyId = keyId; } /** * <p> * Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation. * </p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix it with * <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must use the key ARN * or alias ARN. 
* </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias name and * alias ARN, use <a>ListAliases</a>. * </p> * * @return Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. To get the type and origin of your KMS key, use * the <a>DescribeKey</a> operation.</p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix * it with <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must * use the key ARN or alias ARN. * </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias * name and alias ARN, use <a>ListAliases</a>. */ public String getKeyId() { return this.keyId; } /** * <p> * Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. 
To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation. * </p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix it with * <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must use the key ARN * or alias ARN. * </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias name and * alias ARN, use <a>ListAliases</a>. * </p> * * @param keyId * Specifies the symmetric KMS key that encrypts the private key in the data key pair. You cannot specify an * asymmetric KMS key or a KMS key in a custom key store. To get the type and origin of your KMS key, use the * <a>DescribeKey</a> operation.</p> * <p> * To specify a KMS key, use its key ID, key ARN, alias name, or alias ARN. When using an alias name, prefix * it with <code>"alias/"</code>. To specify a KMS key in a different Amazon Web Services account, you must * use the key ARN or alias ARN. 
* </p> * <p> * For example: * </p> * <ul> * <li> * <p> * Key ID: <code>1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Key ARN: <code>arn:aws:kms:us-east-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab</code> * </p> * </li> * <li> * <p> * Alias name: <code>alias/ExampleAlias</code> * </p> * </li> * <li> * <p> * Alias ARN: <code>arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias</code> * </p> * </li> * </ul> * <p> * To get the key ID and key ARN for a KMS key, use <a>ListKeys</a> or <a>DescribeKey</a>. To get the alias * name and alias ARN, use <a>ListAliases</a>. * @return Returns a reference to this object so that method calls can be chained together. */ public GenerateDataKeyPairRequest withKeyId(String keyId) { setKeyId(keyId); return this; } /** * <p> * Determines the type of data key pair that is generated. * </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and verify (but * not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not effective on data * key pairs, which are used outside of KMS. * </p> * * @param keyPairSpec * Determines the type of data key pair that is generated. </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and * verify (but not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not * effective on data key pairs, which are used outside of KMS. * @see DataKeyPairSpec */ public void setKeyPairSpec(String keyPairSpec) { this.keyPairSpec = keyPairSpec; } /** * <p> * Determines the type of data key pair that is generated. * </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and verify (but * not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not effective on data * key pairs, which are used outside of KMS. 
* </p> * * @return Determines the type of data key pair that is generated. </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and * verify (but not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not * effective on data key pairs, which are used outside of KMS. * @see DataKeyPairSpec */ public String getKeyPairSpec() { return this.keyPairSpec; } /** * <p> * Determines the type of data key pair that is generated. * </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and verify (but * not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not effective on data * key pairs, which are used outside of KMS. * </p> * * @param keyPairSpec * Determines the type of data key pair that is generated. </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and * verify (but not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not * effective on data key pairs, which are used outside of KMS. * @return Returns a reference to this object so that method calls can be chained together. * @see DataKeyPairSpec */ public GenerateDataKeyPairRequest withKeyPairSpec(String keyPairSpec) { setKeyPairSpec(keyPairSpec); return this; } /** * <p> * Determines the type of data key pair that is generated. * </p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and verify (but * not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not effective on data * key pairs, which are used outside of KMS. * </p> * * @param keyPairSpec * Determines the type of data key pair that is generated. 
</p> * <p> * The KMS rule that restricts the use of asymmetric RSA KMS keys to encrypt and decrypt or to sign and * verify (but not both), and the rule that permits you to use ECC KMS keys only to sign and verify, are not * effective on data key pairs, which are used outside of KMS. * @return Returns a reference to this object so that method calls can be chained together. * @see DataKeyPairSpec */ public GenerateDataKeyPairRequest withKeyPairSpec(DataKeyPairSpec keyPairSpec) { this.keyPairSpec = keyPairSpec.toString(); return this; } /** * <p> * A list of grant tokens. * </p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet achieved * <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using a grant * token</a> in the <i>Key Management Service Developer Guide</i>. * </p> * * @return A list of grant tokens.</p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet * achieved <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and * <a href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using * a grant token</a> in the <i>Key Management Service Developer Guide</i>. */ public java.util.List<String> getGrantTokens() { if (grantTokens == null) { grantTokens = new com.amazonaws.internal.SdkInternalList<String>(); } return grantTokens; } /** * <p> * A list of grant tokens. * </p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet achieved * <i>eventual consistency</i>. 
For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using a grant * token</a> in the <i>Key Management Service Developer Guide</i>. * </p> * * @param grantTokens * A list of grant tokens.</p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet * achieved <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and * <a href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using * a grant token</a> in the <i>Key Management Service Developer Guide</i>. */ public void setGrantTokens(java.util.Collection<String> grantTokens) { if (grantTokens == null) { this.grantTokens = null; return; } this.grantTokens = new com.amazonaws.internal.SdkInternalList<String>(grantTokens); } /** * <p> * A list of grant tokens. * </p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet achieved * <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using a grant * token</a> in the <i>Key Management Service Developer Guide</i>. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setGrantTokens(java.util.Collection)} or {@link #withGrantTokens(java.util.Collection)} if you want to * override the existing values. 
* </p> * * @param grantTokens * A list of grant tokens.</p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet * achieved <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and * <a href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using * a grant token</a> in the <i>Key Management Service Developer Guide</i>. * @return Returns a reference to this object so that method calls can be chained together. */ public GenerateDataKeyPairRequest withGrantTokens(String... grantTokens) { if (this.grantTokens == null) { setGrantTokens(new com.amazonaws.internal.SdkInternalList<String>(grantTokens.length)); } for (String ele : grantTokens) { this.grantTokens.add(ele); } return this; } /** * <p> * A list of grant tokens. * </p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet achieved * <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using a grant * token</a> in the <i>Key Management Service Developer Guide</i>. * </p> * * @param grantTokens * A list of grant tokens.</p> * <p> * Use a grant token when your permission to call this operation comes from a new grant that has not yet * achieved <i>eventual consistency</i>. For more information, see <a * href="https://docs.aws.amazon.com/kms/latest/developerguide/grants.html#grant_token">Grant token</a> and * <a href="https://docs.aws.amazon.com/kms/latest/developerguide/grant-manage.html#using-grant-token">Using * a grant token</a> in the <i>Key Management Service Developer Guide</i>. 
* @return Returns a reference to this object so that method calls can be chained together. */ public GenerateDataKeyPairRequest withGrantTokens(java.util.Collection<String> grantTokens) { setGrantTokens(grantTokens); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getEncryptionContext() != null) sb.append("EncryptionContext: ").append(getEncryptionContext()).append(","); if (getKeyId() != null) sb.append("KeyId: ").append(getKeyId()).append(","); if (getKeyPairSpec() != null) sb.append("KeyPairSpec: ").append(getKeyPairSpec()).append(","); if (getGrantTokens() != null) sb.append("GrantTokens: ").append(getGrantTokens()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof GenerateDataKeyPairRequest == false) return false; GenerateDataKeyPairRequest other = (GenerateDataKeyPairRequest) obj; if (other.getEncryptionContext() == null ^ this.getEncryptionContext() == null) return false; if (other.getEncryptionContext() != null && other.getEncryptionContext().equals(this.getEncryptionContext()) == false) return false; if (other.getKeyId() == null ^ this.getKeyId() == null) return false; if (other.getKeyId() != null && other.getKeyId().equals(this.getKeyId()) == false) return false; if (other.getKeyPairSpec() == null ^ this.getKeyPairSpec() == null) return false; if (other.getKeyPairSpec() != null && other.getKeyPairSpec().equals(this.getKeyPairSpec()) == false) return false; if (other.getGrantTokens() == null ^ this.getGrantTokens() == null) return false; if (other.getGrantTokens() != null && 
other.getGrantTokens().equals(this.getGrantTokens()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getEncryptionContext() == null) ? 0 : getEncryptionContext().hashCode()); hashCode = prime * hashCode + ((getKeyId() == null) ? 0 : getKeyId().hashCode()); hashCode = prime * hashCode + ((getKeyPairSpec() == null) ? 0 : getKeyPairSpec().hashCode()); hashCode = prime * hashCode + ((getGrantTokens() == null) ? 0 : getGrantTokens().hashCode()); return hashCode; } @Override public GenerateDataKeyPairRequest clone() { return (GenerateDataKeyPairRequest) super.clone(); } }
package io.cattle.platform.configitem.context.dao.impl;

import io.cattle.platform.configitem.context.dao.LoadBalancerInfoDao;
import io.cattle.platform.configitem.context.data.LoadBalancerListenerInfo;
import io.cattle.platform.core.addon.LoadBalancerTargetInput;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.core.model.ServiceConsumeMap;
import io.cattle.platform.core.model.ServiceExposeMap;
import io.cattle.platform.core.util.LoadBalancerTargetPortSpec;
import io.cattle.platform.core.util.PortSpec;
import io.cattle.platform.json.JsonMapper;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.servicediscovery.api.dao.ServiceConsumeMapDao;
import io.cattle.platform.servicediscovery.api.dao.ServiceExposeMapDao;
import io.cattle.platform.servicediscovery.api.util.ServiceDiscoveryUtil;
import io.cattle.platform.servicediscovery.service.ServiceDiscoveryService;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.inject.Inject;

/**
 * Derives load-balancer listener and target information for a load-balancer
 * service from its launch-config data and its service links.
 */
public class LoadBalancerInfoDaoImpl implements LoadBalancerInfoDao {

    @Inject
    ServiceConsumeMapDao consumeMapDao;

    @Inject
    ObjectManager objectManager;

    @Inject
    ServiceExposeMapDao exposeMapDao;

    @Inject
    JsonMapper jsonMapper;

    @Inject
    ServiceDiscoveryService sdService;

    /**
     * Builds the listener list for the given LB service from its "ports"
     * (public) and "expose" (private) launch-config entries. Ports labeled as
     * SSL ports get an ssl/https source protocol; ports labeled as proxy-protocol
     * ports are flagged on the listener.
     */
    @Override
    @SuppressWarnings("unchecked")
    public List<LoadBalancerListenerInfo> getListeners(Service lbService) {
        Map<Integer, LoadBalancerListenerInfo> listeners = new HashMap<>();
        Map<String, Object> launchConfigData = ServiceDiscoveryUtil.getLaunchConfigDataAsMap(lbService, null);
        // 1. create listeners
        Map<String, Boolean> portDefs = new HashMap<>();
        if (launchConfigData.get(InstanceConstants.FIELD_PORTS) != null) {
            for (String port : (List<String>) launchConfigData.get(InstanceConstants.FIELD_PORTS)) {
                portDefs.put(port, true);
            }
        }

        if (launchConfigData.get(InstanceConstants.FIELD_EXPOSE) != null) {
            for (String port : (List<String>) launchConfigData.get(InstanceConstants.FIELD_EXPOSE)) {
                portDefs.put(port, false);
            }
        }

        List<String> sslPorts = getLabeledPorts(launchConfigData, ServiceConstants.LABEL_LB_SSL_PORTS);
        List<String> proxyProtocolPorts = getLabeledPorts(launchConfigData, ServiceConstants.LABEL_LB_PROXY_PORTS);
        List<LoadBalancerListenerInfo> listenersToReturn = new ArrayList<>();

        for (String port : portDefs.keySet()) {
            PortSpec spec = new PortSpec(port);
            String protocol;
            if (!port.contains("tcp")) {
                // default to http unless defined otherwise in the compose file
                protocol = "http";
            } else {
                protocol = "tcp";
            }

            // Skip port definitions whose private (target) port was already
            // turned into a listener.
            if (listeners.containsKey(spec.getPrivatePort())) {
                continue;
            }

            int targetPort = spec.getPrivatePort();
            Integer sourcePort = null;
            Integer privatePort = null;
            // set sourcePort only for ports defined in "ports" param
            // the ones defined in expose, will get translated to private listeners
            if (portDefs.get(port)) {
                if (spec.getPublicPort() == null) {
                    sourcePort = targetPort;
                } else {
                    sourcePort = spec.getPublicPort();
                }
                privatePort = sourcePort;
            } else {
                if (spec.getPublicPort() == null) {
                    privatePort = targetPort;
                } else {
                    privatePort = spec.getPublicPort();
                }
            }

            String sourceProtocol = protocol;
            if (sslPorts.contains(privatePort.toString())) {
                if (protocol.equals("tcp")) {
                    sourceProtocol = "ssl";
                } else {
                    sourceProtocol = "https";
                }
            }

            LoadBalancerListenerInfo listener = new LoadBalancerListenerInfo(lbService, privatePort, sourcePort,
                    sourceProtocol, targetPort, proxyProtocolPorts.contains(privatePort.toString()));
            // FIX: the map was never populated, so the containsKey() dedup
            // check above was always false and duplicate definitions of the
            // same private port produced duplicate listeners.
            listeners.put(spec.getPrivatePort(), listener);
            listenersToReturn.add(listener);
        }
        return listenersToReturn;
    }

    /**
     * Reads a comma-separated list of ports from the given launch-config label
     * (e.g. the SSL-ports or proxy-protocol-ports label).
     *
     * @return the trimmed port strings; empty list when the label is absent
     */
    @SuppressWarnings("unchecked")
    protected List<String> getLabeledPorts(Map<String, Object> launchConfigData, String labelName) {
        List<String> sslPorts = new ArrayList<>();
        Map<String, String> labels = (Map<String, String>) launchConfigData.get(InstanceConstants.FIELD_LABELS);
        if (labels != null) {
            Object sslPortsObj = labels.get(labelName);
            if (sslPortsObj != null) {
                for (String sslPort : sslPortsObj.toString().split(",")) {
                    sslPorts.add(sslPort.trim());
                }
            }
        }
        return sslPorts;
    }

    /**
     * Resolves the full set of target port specs for a target: specs declared
     * on the target are completed with missing source/target ports from the
     * listeners, and listeners not covered by the target get a default spec.
     */
    @Override
    public List<LoadBalancerTargetPortSpec> getLoadBalancerTargetPorts(LoadBalancerTargetInput target,
            List<? extends LoadBalancerListenerInfo> listeners) {
        List<LoadBalancerTargetPortSpec> portSpecsInitial = new ArrayList<>();
        Map<Integer, LoadBalancerListenerInfo> lbSourcePorts = new HashMap<>();
        for (LoadBalancerListenerInfo listener : listeners) {
            lbSourcePorts.put(getSourcePort(listener), listener);
        }

        List<Integer> targetSourcePorts = new ArrayList<>();

        List<? extends String> portsData = target.getPorts();
        if (portsData != null && !portsData.isEmpty()) {
            for (String portData : portsData) {
                portSpecsInitial.add(new LoadBalancerTargetPortSpec(portData));
            }
        }

        List<LoadBalancerTargetPortSpec> portSpecsToReturn = completePortSpecs(portSpecsInitial, listeners,
                lbSourcePorts, targetSourcePorts);

        addMissingPortSpecs(lbSourcePorts, targetSourcePorts, portSpecsToReturn);

        return portSpecsToReturn;
    }

    /**
     * Adds a default port spec for every listener source port the target did
     * not define a spec for.
     */
    protected void addMissingPortSpecs(Map<Integer, LoadBalancerListenerInfo> lbSourcePorts,
            List<Integer> targetSourcePorts, List<LoadBalancerTargetPortSpec> completePortSpecs) {
        // create port specs for missing load balancer source ports
        for (Integer lbSourcePort : lbSourcePorts.keySet()) {
            if (!targetSourcePorts.contains(lbSourcePort)) {
                LoadBalancerListenerInfo listener = lbSourcePorts.get(lbSourcePort);
                completePortSpecs
                        .add(new LoadBalancerTargetPortSpec(listener.getTargetPort(), getSourcePort(listener)));
            }
        }
    }

    protected Integer getSourcePort(LoadBalancerListenerInfo listener) {
        // LEGACY code to support the case when private port is not defined
        return listener.getPrivatePort() != null ? listener.getPrivatePort() : listener.getSourcePort();
    }

    /**
     * Completes partial port specs: a spec without a source port is fanned out
     * to every listener's source port; a spec without a target port inherits
     * the matching listener's target port (and is dropped if no listener
     * matches its source port).
     */
    protected List<LoadBalancerTargetPortSpec> completePortSpecs(List<LoadBalancerTargetPortSpec> portSpecsInitial,
            List<? extends LoadBalancerListenerInfo> listeners, Map<Integer, LoadBalancerListenerInfo> lbSourcePorts,
            List<Integer> targetSourcePorts) {
        // complete missing source ports for port specs
        List<LoadBalancerTargetPortSpec> portSpecsWithSourcePorts = new ArrayList<>();
        for (LoadBalancerTargetPortSpec portSpec : portSpecsInitial) {
            if (portSpec.getSourcePort() == null) {
                for (LoadBalancerListenerInfo listener : listeners) {
                    LoadBalancerTargetPortSpec newSpec = new LoadBalancerTargetPortSpec(portSpec);
                    newSpec.setSourcePort(getSourcePort(listener));
                    portSpecsWithSourcePorts.add(newSpec);
                    // register the fact that the source port is defined on the target
                    targetSourcePorts.add(newSpec.getSourcePort());
                }
            } else {
                portSpecsWithSourcePorts.add(portSpec);
                // register the fact that the source port is defined on the target
                targetSourcePorts.add(portSpec.getSourcePort());
            }
        }

        // complete missing target ports
        List<LoadBalancerTargetPortSpec> completePortSpecs = new ArrayList<>();
        for (LoadBalancerTargetPortSpec spec : portSpecsWithSourcePorts) {
            if (spec.getPort() == null) {
                LoadBalancerListenerInfo listener = lbSourcePorts.get(spec.getSourcePort());
                if (listener != null) {
                    spec.setPort(listener.getTargetPort());
                    completePortSpecs.add(spec);
                }
            } else {
                completePortSpecs.add(spec);
            }
        }
        return completePortSpecs;
    }

    /**
     * Collects the targets of a load-balancer service by walking its service
     * links (transitively through DNS services) and gathering the non-removed
     * IP and instance expose maps of every consumed service.
     */
    @Override
    public List<LoadBalancerTargetInput> getLoadBalancerTargets(Service lbService) {
        if (!lbService.getKind().equalsIgnoreCase(ServiceConstants.KIND_LOAD_BALANCER_SERVICE)) {
            return new ArrayList<>();
        }
        List<LoadBalancerTargetInput> targets = new ArrayList<>();
        List<? extends ServiceConsumeMap> lbLinks = consumeMapDao.findConsumedServices(lbService.getId());
        for (ServiceConsumeMap lbLink : lbLinks) {
            List<Service> consumedServices = new ArrayList<>();
            findConsumedServicesImpl(lbLink.getConsumedServiceId(), consumedServices);
            for (Service consumedService : consumedServices) {
                List<? extends ServiceExposeMap> exposeIpMaps = exposeMapDao.getNonRemovedServiceIpMaps(consumedService
                        .getId());
                for (ServiceExposeMap exposeIpMap : exposeIpMaps) {
                    addToTarget(targets, lbLink, exposeIpMap, consumedService);
                }

                List<? extends ServiceExposeMap> exposeInstanceMaps = exposeMapDao
                        .getNonRemovedServiceInstanceMaps(consumedService.getId());
                for (ServiceExposeMap exposeInstanceMap : exposeInstanceMaps) {
                    addToTarget(targets, lbLink, exposeInstanceMap, consumedService);
                }
            }
        }
        return targets;
    }

    /**
     * Adds a target for the expose map unless it is a DNS-prefixed (sidekick)
     * mapping.
     */
    protected void addToTarget(List<LoadBalancerTargetInput> targets, ServiceConsumeMap lbLink,
            ServiceExposeMap exposeMap, Service service) {
        if (exposeMap.getDnsPrefix() == null) {
            targets.add(new LoadBalancerTargetInput(service, exposeMap, lbLink, jsonMapper));
        }
    }

    /**
     * Resolves the concrete services behind a link, following DNS services
     * transitively; non-DNS active services are added to {@code services}.
     */
    protected void findConsumedServicesImpl(long serviceId, List<Service> services) {
        findConsumedServicesImpl(serviceId, services, new HashSet<Long>());
    }

    // FIX: the original only skipped direct self-links, so a link cycle
    // between two or more DNS services (A -> B -> A) recursed until
    // StackOverflowError. Track visited service ids to terminate on any cycle.
    private void findConsumedServicesImpl(long serviceId, List<Service> services, Set<Long> visited) {
        if (!visited.add(serviceId)) {
            return;
        }
        Service service = objectManager.loadResource(Service.class, serviceId);
        if (sdService.isActiveService(service)) {
            if (service.getKind().equalsIgnoreCase(ServiceConstants.KIND_DNS_SERVICE)) {
                List<? extends ServiceConsumeMap> consumedMaps = consumeMapDao.findConsumedServices(serviceId);
                for (ServiceConsumeMap consumedMap : consumedMaps) {
                    if (serviceId == consumedMap.getConsumedServiceId().longValue()) {
                        continue;
                    }
                    findConsumedServicesImpl(consumedMap.getConsumedServiceId(), services, visited);
                }
            } else {
                services.add(service);
            }
        }
    }
}
/*******************************************************************************
 * Copyright 2014 United States Government as represented by the
 * Administrator of the National Aeronautics and Space Administration.
 * All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
/**
 * <copyright>
 * </copyright>
 *
 * $Id$
 */
package gov.nasa.arc.spife.europa.clientside.esmconfig.provider;

import gov.nasa.arc.spife.europa.clientside.esmconfig.EsmConfigFactory;
import gov.nasa.arc.spife.europa.clientside.esmconfig.EsmConfigPackage;
import gov.nasa.arc.spife.europa.clientside.esmconfig.EuropaServerManagerType;
import gov.nasa.arc.spife.europa.clientside.provider.EsmConfigEditPlugin;

import java.util.Collection;
import java.util.List;

import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.ItemProviderAdapter;
import org.eclipse.emf.edit.provider.ViewerNotification;

/**
 * This is the item provider adapter for a {@link gov.nasa.arc.spife.europa.clientside.esmconfig.EuropaServerManagerType} object.
 * <!-- begin-user-doc -->
 * NOTE(review): EMF-generated code — every member below is tagged
 * {@code @generated}; hand edits will be discarded on regeneration unless the
 * tag is changed to {@code @generated NOT}.
 * <!-- end-user-doc -->
 * @generated
 */
public class EuropaServerManagerTypeItemProvider
	extends ItemProviderAdapter
	implements
		IEditingDomainItemProvider,
		IStructuredItemContentProvider,
		ITreeItemContentProvider,
		IItemLabelProvider,
		IItemPropertySource {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EuropaServerManagerTypeItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * Descriptors are built lazily on first call and cached in the inherited
	 * {@code itemPropertyDescriptors} field.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		if (itemPropertyDescriptors == null) {
			super.getPropertyDescriptors(object);

			addChildTimeoutPropertyDescriptor(object);
			addDefaultTypePropertyDescriptor(object);
			addLogLevelPropertyDescriptor(object);
			addPortPropertyDescriptor(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This adds a property descriptor for the Child Timeout feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addChildTimeoutPropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_EuropaServerManagerType_childTimeout_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_EuropaServerManagerType_childTimeout_feature", "_UI_EuropaServerManagerType_type"),
				 EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__CHILD_TIMEOUT,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.INTEGRAL_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This adds a property descriptor for the Default Type feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addDefaultTypePropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_EuropaServerManagerType_defaultType_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_EuropaServerManagerType_defaultType_feature", "_UI_EuropaServerManagerType_type"),
				 EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__DEFAULT_TYPE,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This adds a property descriptor for the Log Level feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addLogLevelPropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_EuropaServerManagerType_logLevel_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_EuropaServerManagerType_logLevel_feature", "_UI_EuropaServerManagerType_type"),
				 EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__LOG_LEVEL,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This adds a property descriptor for the Port feature.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected void addPortPropertyDescriptor(Object object) {
		itemPropertyDescriptors.add
			(createItemPropertyDescriptor
				(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
				 getResourceLocator(),
				 getString("_UI_EuropaServerManagerType_port_feature"),
				 getString("_UI_PropertyDescriptor_description", "_UI_EuropaServerManagerType_port_feature", "_UI_EuropaServerManagerType_type"),
				 EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__PORT,
				 true,
				 false,
				 false,
				 ItemPropertyDescriptor.INTEGRAL_VALUE_IMAGE,
				 null,
				 null));
	}

	/**
	 * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
	 * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
	 * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("restriction")
	@Override
	public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
		if (childrenFeatures == null) {
			super.getChildrenFeatures(object);
			childrenFeatures.add(EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__EUROPA_SERVER);
		}
		return childrenFeatures;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EStructuralFeature getChildFeature(Object object, Object child) {
		// Check the type of the specified child object and return the proper feature to use for
		// adding (see {@link AddCommand}) it as a child.

		return super.getChildFeature(object, child);
	}

	/**
	 * This returns EuropaServerManagerType.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/EuropaServerManagerType"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * NOTE(review): the generated label appends the childTimeout value, which
	 * reads oddly in the UI; if a friendlier label is wanted, customize here
	 * and mark the method {@code @generated NOT}.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		EuropaServerManagerType europaServerManagerType = (EuropaServerManagerType)object;
		return getString("_UI_EuropaServerManagerType_type") + " " + europaServerManagerType.getChildTimeout();
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * Attribute changes refresh only the label/content of this element; child
	 * (EUROPA_SERVER) changes refresh the structure of the subtree.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);

		switch (notification.getFeatureID(EuropaServerManagerType.class)) {
			case EsmConfigPackage.EUROPA_SERVER_MANAGER_TYPE__CHILD_TIMEOUT:
			case EsmConfigPackage.EUROPA_SERVER_MANAGER_TYPE__DEFAULT_TYPE:
			case EsmConfigPackage.EUROPA_SERVER_MANAGER_TYPE__LOG_LEVEL:
			case EsmConfigPackage.EUROPA_SERVER_MANAGER_TYPE__PORT:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), false, true));
				return;
			case EsmConfigPackage.EUROPA_SERVER_MANAGER_TYPE__EUROPA_SERVER:
				fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
				return;
		}
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("restriction")
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);

		newChildDescriptors.add
			(createChildParameter
				(EsmConfigPackage.Literals.EUROPA_SERVER_MANAGER_TYPE__EUROPA_SERVER,
				 EsmConfigFactory.eINSTANCE.createEuropaServerType()));
	}

	/**
	 * Return the resource locator for this item provider's resources.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public ResourceLocator getResourceLocator() {
		return EsmConfigEditPlugin.INSTANCE;
	}

}
/** *============================================================================ * Copyright The Ohio State University Research Foundation, The University of Chicago - * Argonne National Laboratory, Emory University, SemanticBits LLC, and * Ekagra Software Technologies Ltd. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagrid-core/LICENSE.txt for details. *============================================================================ **/ package gov.nih.nci.cagrid.data.cql2; import gov.nih.nci.cagrid.data.InitializationException; import gov.nih.nci.cagrid.data.MalformedQueryException; import gov.nih.nci.cagrid.data.QueryProcessingException; import gov.nih.nci.cagrid.data.mapping.Mappings; import java.io.InputStream; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Properties; import java.util.Set; import javax.xml.namespace.QName; import org.cagrid.cql2.Aggregation; import org.cagrid.cql2.CQLQuery; import org.cagrid.cql2.CQLQueryModifier; import org.cagrid.cql2.CQLTargetObject; import org.cagrid.cql2.results.CQLAggregateResult; import org.cagrid.cql2.results.CQLQueryResults; import org.cagrid.cql2.results.CQLResult; import org.cagrid.cql2.results.ExtendedCQLResult; /** * CQL2 Query Processor base class * * @author David */ public abstract class CQL2QueryProcessor { private Properties params; private InputStream wsddStream; private Mappings classMappings; public CQL2QueryProcessor() { super(); } /** * Configure the query processor with the properties it requires as specified * in the Properties instance provided by getRequiredParameters(), and values * populated by the user's custom entries, if any. 
* * Subclasses which need to do one-time initialization after configuration has completed * may override the <code>initialize()</code> method, which will be invoked * at the completion of <code>configure()</code> * * @param parameters * The parameters as configured by the user. The set of keys must contain all * of the keys contained in the Properties object returned * by <code>getRequiredParamters()</code>. The values in the parameters will * be either the user defined value or the default value from * <code>getRequiredParameters()</code>. * @param wsdd * The input stream which contains the wsdd configuration for the data service. * This stream may be important to locating type mappings for serializing and * deserializing beans. * @param classToQnameMappings * The mapping from classname to QName for serialization purposes. * @throws InitializationException */ public void configure(Properties parameters, InputStream wsdd, Mappings classToQnameMappings) throws InitializationException { verifyProvidedParameters(parameters); this.params = parameters; this.wsddStream = wsdd; this.classMappings = classToQnameMappings; initialize(); } /** * Verifies parameters provided from the configure method * contain all required properties * * @param parameters * @throws InitializationException */ private void verifyProvidedParameters(Properties parameters) throws InitializationException { Set<String> required = new HashSet<String>(); // add all the required parameters to a set Enumeration<Object> requiredKeys = getRequiredParameters().keys(); while (requiredKeys.hasMoreElements()) { required.add((String) requiredKeys.nextElement()); } // remove all the parameters provided Enumeration<Object> providedKeys = parameters.keys(); while (providedKeys.hasMoreElements()) { required.remove(providedKeys.nextElement().toString()); } // verify the provided parameters cover the required ones if (required.size() != 0) { // some required parameters NOT specified! 
StringBuffer error = new StringBuffer(); error.append("Required parameters for query processor "); error.append(getClass().getName()).append(" not specified: "); Iterator<String> requiredKeyIter = required.iterator(); while (requiredKeyIter.hasNext()) { error.append(requiredKeyIter.next()); if (requiredKeyIter.hasNext()) { error.append(", "); } } throw new InitializationException(error.toString()); } } /** * Get a Properties object of parameters the query processor will require * on initialization. * * Subclasses can override this method to return a map describing parameters * their implementation needs. * * The keys are the names of parameters the query processor * requires, the values are the defaults for those properties. The default value * of a property may be an empty string if it is an optional parameter. * The keys MUST be valid java variable names. They MUST NOT contain spaces * or punctuation. They may begin with an upper case character. * * @return * The required properties for the query processor with their default values */ public Properties getRequiredParameters() { return new Properties(); } /** * Get a set of property names whose values should be file names * prepended with the location of the service's etc directory. * Potential uses include locating config files, storing logs, etc. * * Subclasses can override this method to return a Set of Strings which * are the names of property keys from the getRequiredParameters() method. * * @return * The set of property names */ public Set<String> getParametersFromEtc() { return new HashSet<String>(); } /** * Get a collection of the supported CQL 2 Extension types * for the specified extension point * * @param point * @return */ public Collection<QName> getSupportedExtensions(Cql2ExtensionPoint point) { return new HashSet<QName>(); } /** * @return * The parameters as configured by the user at runtime. 
* The set of keys must contain all of the keys contained in * the Properties object returned by <code>getRequiredParameters()</code>. * The values in the parameters will be either the user defined * value or the default value from <code>getRequiredParameters()</code>. */ protected Properties getConfiguredParameters() { return this.params; } /** * @return * The input stream which contains the wsdd configuration for the data service. * This stream may be important to locating type mappings for serializing and * deserializing beans. */ protected InputStream getConfiguredWsddStream() { return this.wsddStream; } /** * Gets the class to QName mapping for the query processor. * This is derived from information in the service properties supplied * through JNDI at service runtime, or it may be overridden for testing. * * @return * @throws Exception */ protected Mappings getClassToQnameMappings() throws Exception { return this.classMappings; } /** * Perform any post-configuration initialization the query processor requires * When this method is called from the configure() method, the getConfiguredParameters() * and getConfiguredWsddStream() methods will return properly populated objects * * @throws InitializationException */ protected void initialize() throws InitializationException { // left empty for subclass implementation } /** * The primary query processing method. * * @param query * @return * @throws QueryProcessingException * Thrown when an error occurs while handling the query * @throws MalformedQueryException * Thrown when the query is found to be defective */ public abstract CQLQueryResults processQuery(CQLQuery query) throws QueryProcessingException, MalformedQueryException; /** * Returns an iterator over the CQL results. * Subclasses may optionally override this method to provide * a custom lazy implementation of an Iterator to the result set. 
* * @param query * @return * @throws QueryProcessingException * @throws MalformedQueryException */ public Iterator<CQLResult> processQueryAndIterate(CQLQuery query) throws QueryProcessingException, MalformedQueryException { CQLQueryResults results = processQuery(query); return new ResultsIterator(results); } /** * Returns a count of the number of instances of a given data type. * * This special purpose method gets the count of the number of data types, * and will only ever be called by the data service infrastructure itself for the purpose * of maintaining the instance count resource property. * * This method exists separately from the processQuery method so that implementers * know to provide an accurate count and not worry about the security / user permissions, * and possibly provide a fast and specialized count operation against their back-end data store. * * @param datatype * @return The number of instances of the specified data type available * @throws QueryProcessingException */ public long getInstanceCount(String datatype) throws QueryProcessingException { CQLQuery query = new CQLQuery(); CQLTargetObject target = new CQLTargetObject(); target.setClassName(datatype); CQLQueryModifier mods = new CQLQueryModifier(); mods.setCountOnly(Boolean.TRUE); query.setCQLTargetObject(target); query.setCQLQueryModifier(mods); CQLQueryResults results = null; try { results = processQuery(query); } catch (MalformedQueryException ex) { throw new QueryProcessingException("Error obtaining instance count: " + ex.getMessage(), ex); } CQLAggregateResult aggResult = results.getAggregationResult(); if (aggResult == null) { throw new QueryProcessingException("Query Processor did not return an aggregate result when counting instances!"); } if (!Aggregation.COUNT.equals(aggResult.getAggregation())) { throw new QueryProcessingException("Query Processor did not return aggregation result of the type " + Aggregation.COUNT.getValue()); } long count = -1; try { count = 
Long.parseLong(results.getAggregationResult().getValue()); } catch (NumberFormatException ex) { throw new QueryProcessingException("Query Processor returned something not parsable as a long for the count: " + ex.getMessage(), ex); } return count; } /** * A simple iterator implementation over a CQL Query Results instance * which returns individual CQLResult instances on next() * * @author David */ private static class ResultsIterator implements Iterator<CQLResult> { private CQLResult[] results = null; private int index = -1; public ResultsIterator(CQLQueryResults queryResults) { if (queryResults.getAggregationResult() != null) { results = new CQLAggregateResult[] {queryResults.getAggregationResult()}; } else if (queryResults.getAttributeResult() != null && queryResults.getAttributeResult().length != 0) { results = queryResults.getAttributeResult(); } else if (queryResults.getObjectResult() != null && queryResults.getObjectResult().length != 0) { results = queryResults.getObjectResult(); } else if (queryResults.getExtendedResult() != null) { results = new ExtendedCQLResult[] {queryResults.getExtendedResult()}; } else { results = new CQLResult[0]; } } public boolean hasNext() { return index + 1 < results.length; } public CQLResult next() { if (hasNext()) { index++; return results[index]; } else { throw new NoSuchElementException(); } } public void remove() { throw new UnsupportedOperationException("remove() is not supported by " + getClass().getName()); } } }
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.step; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import org.pentaho.di.cluster.ClusterSchema; import org.pentaho.di.core.AttributesInterface; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.CheckResultSourceInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Counter; import org.pentaho.di.core.attributes.AttributesUtil; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettlePluginLoaderException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.gui.GUIPositionInterface; import org.pentaho.di.core.gui.Point; import org.pentaho.di.core.plugins.PluginInterface; import org.pentaho.di.core.plugins.PluginRegistry; import org.pentaho.di.core.plugins.StepPluginType; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.variables.Variables; import 
org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceDefinition;
import org.pentaho.di.resource.ResourceExportInterface;
import org.pentaho.di.resource.ResourceHolderInterface;
import org.pentaho.di.resource.ResourceNamingInterface;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.shared.SharedObjectBase;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;
import org.pentaho.di.trans.steps.missing.MissingTrans;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;

/**
 * This class contains everything that is needed to define a step.
 *
 * @since 27-mei-2003
 * @author Matt
 *
 */
public class StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface,
  SharedObjectInterface, CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface,
  AttributesInterface {
  private static Class<?> PKG = StepMeta.class; // for i18n purposes, needed by Translator2!!

  /** XML element name under which a step is serialized by getXML(). */
  public static final String XML_TAG = "step";

  // Well-known step plugin IDs used by the is*() convenience predicates below.
  public static final String STRING_ID_MAPPING = "Mapping";
  public static final String STRING_ID_SINGLE_THREADER = "SingleThreader";
  public static final String STRING_ID_ETL_META_INJECT = "MetaInject";
  public static final String STRING_ID_JOB_EXECUTOR = "JobExecutor";
  public static final String STRING_ID_MAPPING_INPUT = "MappingInput";
  public static final String STRING_ID_MAPPING_OUTPUT = "MappingOutput";

  private String stepid; // --> StepPlugin.id

  /** The step name; this is what equals()/hashCode()/compareTo() are based on. */
  private String stepname;

  /** The plugin-specific metadata (TextFileInputMeta, etc.) backing this step. */
  private StepMetaInterface stepMetaInterface;

  private boolean selected;

  /** true = round-robin row distribution to copies, false = copy rows to all targets. */
  private boolean distributes;

  /** Optional custom row distribution plugin; null means the default distribution. */
  private RowDistributionInterface rowDistribution;

  /** Number of copies as entered by the user; may contain a variable expression. */
  private String copiesString;

  /** Position of the step on the canvas. */
  private Point location;

  private boolean drawstep;

  private String description;

  private boolean terminator;

  private StepPartitioningMeta stepPartitioningMeta;

  /** Partitioning of the step this one sends rows to; only set when present in XML. */
  private StepPartitioningMeta targetStepPartitioningMeta;

  private ClusterSchema clusterSchema;

  private String clusterSchemaName; // temporary to resolve later.

  private StepErrorMeta stepErrorMeta;

  // OK, we need to explain to this running step that we expect input from remote steps.
  // This only happens when the previous step "repartitions". (previous step has different
  // partitioning method than this one)
  //
  // So here we go, let's create List members for the remote input and output step
  //
  /** These are the remote input steps to read from, one per host:port combination */
  private List<RemoteStep> remoteInputSteps;

  /** These are the remote output steps to write to, one per host:port combination */
  private List<RemoteStep> remoteOutputSteps;

  /** Repository object id; null when the step was never saved to a repository. */
  private ObjectId id;

  private TransMeta parentTransMeta;

  /** Cached result of getCopies(); invalidated when the copies string changes. */
  private Integer copiesCache = null;

  /** Named groups of free-form key/value attributes, serialized with the step. */
  protected Map<String, Map<String, String>> attributesMap;

  /**
   * @param stepid
   *          The ID of the step: this is derived information, you can also use the constructor without stepid. This
   *          constructor will be deprecated soon.
   * @param stepname
   *          The name of the new step
   * @param stepMetaInterface
   *          The step metadata interface to use (TextFileInputMeta, etc)
   */
  public StepMeta( String stepid, String stepname, StepMetaInterface stepMetaInterface ) {
    this( stepname, stepMetaInterface );
    // only keep the explicit id when the plugin registry could not derive one
    if ( this.stepid == null ) {
      this.stepid = stepid;
    }
  }

  /**
   * @param stepname
   *          The name of the new step
   * @param stepMetaInterface
   *          The step metadata interface to use (TextFileInputMeta, etc)
   */
  public StepMeta( String stepname, StepMetaInterface stepMetaInterface ) {
    if ( stepMetaInterface != null ) {
      // derive the plugin id from the registered plugin for this metadata class
      this.stepid = PluginRegistry.getInstance().getPluginId( StepPluginType.class, stepMetaInterface );
    }
    this.stepname = stepname;
    setStepMetaInterface( stepMetaInterface );
    selected = false;
    distributes = true;
    copiesString = "1";
    location = new Point( 0, 0 );
    drawstep = false;
    description = null;
    stepPartitioningMeta = new StepPartitioningMeta();
    // targetStepPartitioningMeta = new StepPartitioningMeta();
    clusterSchema = null; // non selected by default.
    remoteInputSteps = new ArrayList<RemoteStep>();
    remoteOutputSteps = new ArrayList<RemoteStep>();
    attributesMap = new HashMap<String, Map<String, String>>();
  }

  public StepMeta() {
    this( (String) null, (String) null, (StepMetaInterface) null );
  }

  /** Serialize this step, including the plugin-specific metadata, to XML. */
  public String getXML() throws KettleException {
    return getXML( true );
  }

  /**
   * Serialize this step to XML.
   *
   * @param includeInterface
   *          when true, the plugin-specific metadata XML is embedded as well
   */
  public String getXML( boolean includeInterface ) throws KettleException {
    StringBuilder retval = new StringBuilder( 200 );

    retval.append( " <" ).append( XML_TAG ).append( '>' ).append( Const.CR );
    retval.append( " " ).append( XMLHandler.addTagValue( "name", getName() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "type", getStepID() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "description", description ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "distribute", distributes ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "custom_distribution", rowDistribution == null
      ? null : rowDistribution.getCode() ) );
    retval.append( " " ).append( XMLHandler.addTagValue( "copies", copiesString ) );

    retval.append( stepPartitioningMeta.getXML() );
    if ( targetStepPartitioningMeta != null ) {
      retval.append( XMLHandler.openTag( "target_step_partitioning" ) ).append( targetStepPartitioningMeta.getXML() )
        .append( XMLHandler.closeTag( "target_step_partitioning" ) );
    }

    if ( includeInterface ) {
      retval.append( stepMetaInterface.getXML() );
    }

    retval.append( AttributesUtil.getAttributesXml( attributesMap ) );

    retval.append( " " ).append(
      XMLHandler.addTagValue( "cluster_schema", clusterSchema == null ? "" : clusterSchema.getName() ) );

    retval.append( " <remotesteps>" );
    // Output the remote input steps
    List<RemoteStep> inputSteps = new ArrayList<RemoteStep>( remoteInputSteps );
    Collections.sort( inputSteps ); // sort alphabetically, making it easier to compare XML files
    retval.append( " <input>" );
    for ( RemoteStep remoteStep : inputSteps ) {
      retval.append( " " ).append( remoteStep.getXML() ).append( Const.CR );
    }
    retval.append( " </input>" );

    // Output the remote output steps
    List<RemoteStep> outputSteps = new ArrayList<RemoteStep>( remoteOutputSteps );
    Collections.sort( outputSteps ); // sort alphabetically, making it easier to compare XML files
    retval.append( " <output>" );
    for ( RemoteStep remoteStep : outputSteps ) {
      retval.append( " " ).append( remoteStep.getXML() ).append( Const.CR );
    }
    retval.append( " </output>" );
    retval.append( " </remotesteps>" );

    retval.append( " <GUI>" ).append( Const.CR );
    retval.append( " <xloc>" ).append( location.x ).append( "</xloc>" ).append( Const.CR );
    retval.append( " <yloc>" ).append( location.y ).append( "</yloc>" ).append( Const.CR );
    retval.append( " <draw>" ).append( ( drawstep ? "Y" : "N" ) ).append( "</draw>" ).append( Const.CR );
    retval.append( " </GUI>" ).append( Const.CR );
    retval.append( " </" + XML_TAG + ">" ).append( Const.CR ).append( Const.CR );

    return retval.toString();
  }

  /**
   * Read the step data from XML
   *
   * @param stepnode
   *          The XML step node.
   * @param databases
   *          A list of databases
   * @param counters
   *          A map with all defined counters.
   * @deprecated
   */
  @Deprecated
  public StepMeta( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters )
    throws KettleXMLException, KettlePluginLoaderException {
    this( stepnode, databases, (IMetaStore) null );
  }

  /**
   * Read the step data from XML
   *
   * @param stepnode
   *          The XML step node.
   * @param databases
   *          A list of databases
   * @param metaStore
   *          The metastore used for resolving plugin metadata.
   *
   */
  public StepMeta( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException,
    KettlePluginLoaderException {
    this();
    PluginRegistry registry = PluginRegistry.getInstance();
    try {
      stepname = XMLHandler.getTagValue( stepnode, "name" );
      stepid = XMLHandler.getTagValue( stepnode, "type" );

      // Create a new StepMetaInterface object...
      PluginInterface sp = registry.findPluginWithId( StepPluginType.class, stepid );
      if ( sp == null ) {
        // unknown plugin: install a MissingTrans placeholder so the transformation still loads
        setStepMetaInterface( (StepMetaInterface) new MissingTrans( stepname, stepid ) );
      } else {
        setStepMetaInterface( (StepMetaInterface) registry.loadClass( sp ) );
      }
      if ( this.stepMetaInterface != null ) {
        if ( sp != null ) {
          stepid = sp.getIds()[0]; // revert to the default in case we loaded an alternate version
        }

        // Load the specifics from XML...
        if ( stepMetaInterface != null ) {
          loadXmlCompatibleStepMeta( stepMetaInterface, stepnode, databases );
          stepMetaInterface.loadXML( stepnode, databases, metaStore );
        }

        /* Handle info general to all step types... */
        description = XMLHandler.getTagValue( stepnode, "description" );
        copiesString = XMLHandler.getTagValue( stepnode, "copies" );
        String sdistri = XMLHandler.getTagValue( stepnode, "distribute" );
        distributes = "Y".equalsIgnoreCase( sdistri );
        if ( sdistri == null ) {
          distributes = true; // default=distribute
        }

        // Load the attribute groups map
        //
        attributesMap = AttributesUtil.loadAttributes( XMLHandler.getSubNode( stepnode, AttributesUtil.XML_TAG ) );

        // Determine the row distribution
        //
        String rowDistributionCode = XMLHandler.getTagValue( stepnode, "custom_distribution" );
        rowDistribution =
          PluginRegistry.getInstance().loadClass(
            RowDistributionPluginType.class, rowDistributionCode, RowDistributionInterface.class );

        // Handle GUI information: location & drawstep?
        String xloc, yloc;
        int x, y;
        xloc = XMLHandler.getTagValue( stepnode, "GUI", "xloc" );
        yloc = XMLHandler.getTagValue( stepnode, "GUI", "yloc" );
        // unparseable coordinates silently default to 0 (best-effort GUI restore)
        try {
          x = Integer.parseInt( xloc );
        } catch ( Exception e ) {
          x = 0;
        }
        try {
          y = Integer.parseInt( yloc );
        } catch ( Exception e ) {
          y = 0;
        }
        location = new Point( x, y );
        drawstep = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "GUI", "draw" ) );

        // The partitioning information?
        //
        Node partNode = XMLHandler.getSubNode( stepnode, "partitioning" );
        stepPartitioningMeta = new StepPartitioningMeta( partNode );

        // Target partitioning information?
        //
        Node targetPartNode = XMLHandler.getSubNode( stepnode, "target_step_partitioning" );
        partNode = XMLHandler.getSubNode( targetPartNode, "partitioning" );
        if ( partNode != null ) {
          targetStepPartitioningMeta = new StepPartitioningMeta( partNode );
        }

        clusterSchemaName = XMLHandler.getTagValue( stepnode, "cluster_schema" );

        // The remote input and output steps...
        Node remotestepsNode = XMLHandler.getSubNode( stepnode, "remotesteps" );
        Node inputNode = XMLHandler.getSubNode( remotestepsNode, "input" );
        int nrInput = XMLHandler.countNodes( inputNode, RemoteStep.XML_TAG );
        for ( int i = 0; i < nrInput; i++ ) {
          remoteInputSteps.add( new RemoteStep( XMLHandler.getSubNodeByNr( inputNode, RemoteStep.XML_TAG, i ) ) );
        }
        Node outputNode = XMLHandler.getSubNode( remotestepsNode, "output" );
        int nrOutput = XMLHandler.countNodes( outputNode, RemoteStep.XML_TAG );
        for ( int i = 0; i < nrOutput; i++ ) {
          remoteOutputSteps.add( new RemoteStep( XMLHandler.getSubNodeByNr( outputNode, RemoteStep.XML_TAG, i ) ) );
        }
      }
    } catch ( KettlePluginLoaderException e ) {
      throw e;
    } catch ( Exception e ) {
      throw new KettleXMLException( BaseMessages.getString( PKG, "StepMeta.Exception.UnableToLoadStepInfo" ) + e
        .toString(), e );
    }
  }

  /**
   * Just in case we missed a v4 plugin using deprecated methods.
   *
   * NOTE(review): the stepMetaInterface2 parameter is never used; the method delegates to the
   * stepMetaInterface field instead. The two are the same object at the single call site, but
   * this looks accidental — confirm before relying on it.
   *
   * @param stepMetaInterface2
   * @param stepnode
   * @param databases
   * @throws KettleXMLException
   */
  @SuppressWarnings( "deprecation" )
  private void loadXmlCompatibleStepMeta( StepMetaInterface stepMetaInterface2, Node stepnode,
    List<DatabaseMeta> databases ) throws KettleXMLException {
    stepMetaInterface.loadXML( stepnode, databases, new HashMap<String, Counter>() );
  }

  /**
   * Resolves the name of the cluster loaded from XML/Repository to the correct clusterSchema object
   *
   * @param clusterSchemas
   *          The list of clusterSchemas to reference.
   */
  public void setClusterSchemaAfterLoading( List<ClusterSchema> clusterSchemas ) {
    if ( clusterSchemaName == null ) {
      return;
    }
    // last matching schema wins if the list contains duplicate names
    for ( ClusterSchema look : clusterSchemas ) {
      if ( look.getName().equals( clusterSchemaName ) ) {
        clusterSchema = look;
      }
    }
  }

  public ObjectId getObjectId() {
    return id;
  }

  public void setObjectId( ObjectId id ) {
    this.id = id;
  }

  /**
   * See whether or not the step is drawn on the canvas.
   *
   * @return True if the step is drawn on the canvas.
   */
  public boolean isDrawn() {
    return drawstep;
  }

  /**
   * See whether or not the step is drawn on the canvas. Same as isDrawn(), but needed for findMethod(StepMeta,
   * drawstep) called by StringSearcher.findMetaData(). Otherwise findMethod() returns
   * org.pentaho.di.trans.step.StepMeta.drawStep() instead of isDrawn().
   *
   * @return True if the step is drawn on the canvas.
   */
  public boolean isDrawStep() {
    return drawstep;
  }

  /**
   * Sets the draw attribute of the step so that it will be drawn on the canvas.
   *
   * @param draw
   *          True if you want the step to show itself on the canvas, False if you don't.
   */
  public void setDraw( boolean draw ) {
    drawstep = draw;
    setChanged();
  }

  /**
   * Sets the number of parallel copies that this step will be launched with.
   *
   * @param c
   *          The number of copies.
   */
  public void setCopies( int c ) {
    setChanged();
    copiesString = Integer.toString( c );
    copiesCache = c;
  }

  /**
   * Get the number of copies to start of a step. This takes into account the partitioning logic.
   *
   * @return the number of step copies to start.
   */
  public int getCopies() {
    // If the step is partitioned, that's going to determine the number of copies, nothing else...
// if ( isPartitioned() && getStepPartitioningMeta().getPartitionSchema() != null ) { List<String> partitionIDs = getStepPartitioningMeta().getPartitionSchema().getPartitionIDs(); if ( partitionIDs != null && partitionIDs.size() > 0 ) { // these are the partitions the step can "reach" return partitionIDs.size(); } } if ( copiesCache != null ) { return copiesCache.intValue(); } if ( parentTransMeta != null ) { // Return -1 to indicate that the variable or string value couldn't be converted to number // copiesCache = Const.toInt( parentTransMeta.environmentSubstitute( copiesString ), -1 ); } else { copiesCache = Const.toInt( copiesString, 1 ); } return copiesCache; } public void drawStep() { setDraw( true ); setChanged(); } public void hideStep() { setDraw( false ); setChanged(); } /** * Two steps are equal if their names are equal. * * @return true if the two steps are equal. */ public boolean equals( Object obj ) { if ( obj == null ) { return false; } StepMeta stepMeta = (StepMeta) obj; // getName() is returning stepname, matching the hashCode() algorithm return getName().equalsIgnoreCase( stepMeta.getName() ); } public int hashCode() { return stepname.hashCode(); } public int compareTo( StepMeta o ) { return toString().compareTo( o.toString() ); } public boolean hasChanged() { StepMetaInterface bsi = this.getStepMetaInterface(); return bsi != null ? 
      bsi.hasChanged() : false;
  }

  public void setChanged( boolean ch ) {
    // NOTE(review): unconditional cast — throws ClassCastException if a plugin's
    // StepMetaInterface does not extend BaseStepMeta; confirm that is guaranteed elsewhere.
    BaseStepMeta bsi = (BaseStepMeta) this.getStepMetaInterface();
    if ( bsi != null ) {
      bsi.setChanged( ch );
    }
  }

  public void setChanged() {
    StepMetaInterface bsi = this.getStepMetaInterface();
    if ( bsi != null ) {
      bsi.setChanged();
    }
  }

  public boolean chosesTargetSteps() {
    if ( getStepMetaInterface() != null ) {
      List<StreamInterface> targetStreams = getStepMetaInterface().getStepIOMeta().getTargetStreams();
      // NOTE(review): returns true when there are NO target streams, which looks inverted
      // relative to the method name ("chooses target steps") — verify against callers
      // before changing.
      return targetStreams.isEmpty();
    }
    return false;
  }

  /** Deep-ish copy via replaceMeta(); the repository object id is deliberately cleared. */
  public Object clone() {
    StepMeta stepMeta = new StepMeta();
    stepMeta.replaceMeta( this );
    stepMeta.setObjectId( null );
    return stepMeta;
  }

  /**
   * Replace all of this step's state with (copies of) the state of the given step.
   * Mutable members (step metadata, partitioning, remote steps, error handling,
   * attributes map) are cloned/copied; the copies cache is invalidated.
   */
  public void replaceMeta( StepMeta stepMeta ) {
    this.stepid = stepMeta.stepid; // --> StepPlugin.id
    this.stepname = stepMeta.stepname;
    if ( stepMeta.stepMetaInterface != null ) {
      setStepMetaInterface( (StepMetaInterface) stepMeta.stepMetaInterface.clone() );
    } else {
      this.stepMetaInterface = null;
    }
    this.selected = stepMeta.selected;
    this.distributes = stepMeta.distributes;
    this.setRowDistribution( stepMeta.getRowDistribution() );
    this.copiesString = stepMeta.copiesString;
    this.copiesCache = null; // force re-calculation
    if ( stepMeta.location != null ) {
      this.location = new Point( stepMeta.location.x, stepMeta.location.y );
    } else {
      this.location = null;
    }
    this.drawstep = stepMeta.drawstep;
    this.description = stepMeta.description;
    this.terminator = stepMeta.terminator;
    if ( stepMeta.stepPartitioningMeta != null ) {
      this.stepPartitioningMeta = stepMeta.stepPartitioningMeta.clone();
    } else {
      this.stepPartitioningMeta = null;
    }
    if ( stepMeta.clusterSchema != null ) {
      this.clusterSchema = stepMeta.clusterSchema.clone();
    } else {
      this.clusterSchema = null;
    }
    this.clusterSchemaName = stepMeta.clusterSchemaName; // temporary to resolve later.

    // Also replace the remote steps with cloned versions...
    //
    this.remoteInputSteps = new ArrayList<RemoteStep>();
    for ( RemoteStep remoteStep : stepMeta.remoteInputSteps ) {
      this.remoteInputSteps.add( (RemoteStep) remoteStep.clone() );
    }
    this.remoteOutputSteps = new ArrayList<RemoteStep>();
    for ( RemoteStep remoteStep : stepMeta.remoteOutputSteps ) {
      this.remoteOutputSteps.add( (RemoteStep) remoteStep.clone() );
    }

    // The error handling needs to be done too...
    //
    if ( stepMeta.stepErrorMeta != null ) {
      this.stepErrorMeta = stepMeta.stepErrorMeta.clone();
    }

    this.attributesMap = copyStringMap( stepMeta.attributesMap );

    // this.setShared(stepMeta.isShared());
    this.id = stepMeta.getObjectId();
    this.setChanged( true );
  }

  /** Two-level copy of the attributes map: outer and inner maps are copied, values shared. */
  private static Map<String, Map<String, String>> copyStringMap( Map<String, Map<String, String>> map ) {
    if ( map == null ) {
      return new HashMap<String, Map<String, String>>();
    }

    Map<String, Map<String, String>> result = new HashMap<String, Map<String, String>>( map.size() );
    for ( Map.Entry<String, Map<String, String>> entry : map.entrySet() ) {
      Map<String, String> value = entry.getValue();
      HashMap<String, String> copy = ( value == null ) ? null : new HashMap<String, String>( value );
      result.put( entry.getKey(), copy );
    }
    return result;
  }

  public StepMetaInterface getStepMetaInterface() {
    return stepMetaInterface;
  }

  /** Sets the plugin metadata and back-links it to this StepMeta as its parent. */
  public void setStepMetaInterface( StepMetaInterface stepMetaInterface ) {
    this.stepMetaInterface = stepMetaInterface;
    if ( stepMetaInterface != null ) {
      this.stepMetaInterface.setParentStepMeta( this );
    }
  }

  public String getStepID() {
    return stepid;
  }

  public String getName() {
    return stepname;
  }

  public void setName( String sname ) {
    stepname = sname;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription( String description ) {
    this.description = description;
  }

  public void setSelected( boolean sel ) {
    selected = sel;
  }

  public void flipSelected() {
    selected = !selected;
  }

  public boolean isSelected() {
    return selected;
  }

  public void setTerminator() {
    setTerminator( true );
  }

  public void setTerminator( boolean t ) {
    terminator = t;
  }

  public boolean hasTerminator() {
    return terminator;
  }

  public StepMeta( ObjectId id_step ) {
    this( (String) null, (String) null, (StepMetaInterface) null );
    setObjectId( id_step );
  }

  /** Move the step on the canvas; negative coordinates are clamped to 0. */
  public void setLocation( int x, int y ) {
    int nx = ( x >= 0 ? x : 0 );
    int ny = ( y >= 0 ? y : 0 );

    Point loc = new Point( nx, ny );
    if ( !loc.equals( location ) ) {
      setChanged();
    }
    location = loc;
  }

  public void setLocation( Point loc ) {
    if ( loc != null && !loc.equals( location ) ) {
      setChanged();
    }
    location = loc;
  }

  public Point getLocation() {
    return location;
  }

  @Deprecated
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input,
    String[] output, RowMetaInterface info ) {
    check( remarks, transMeta, prev, input, output, info, new Variables(), null, null );
  }

  @SuppressWarnings( "deprecation" )
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input,
    String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ) {
    // NOTE(review): both the deprecated and the current check() variants are invoked,
    // presumably so that v4 plugins overriding only the old signature still run —
    // plugins implementing both will have their checks executed twice; confirm intended.
    stepMetaInterface.check( remarks, transMeta, this, prev, input, output, info );
    stepMetaInterface.check( remarks, transMeta, this, prev, input, output, info, space, repository, metaStore );
  }

  public String toString() {
    if ( getName() == null ) {
      return getClass().getName();
    }
    return getName();
  }

  /**
   * @return true is the step is partitioned
   */
  public boolean isPartitioned() {
    return stepPartitioningMeta.isPartitioned();
  }

  /**
   * @return true is the step is partitioned
   */
  public boolean isTargetPartitioned() {
    // NOTE(review): targetStepPartitioningMeta is only assigned when target partitioning XML
    // is present (or via the setter), so this can throw NullPointerException — verify callers.
    return targetStepPartitioningMeta.isPartitioned();
  }

  /**
   * @return the stepPartitioningMeta
   */
  public StepPartitioningMeta getStepPartitioningMeta() {
    return stepPartitioningMeta;
  }

  /**
   * @param stepPartitioningMeta
   *          the stepPartitioningMeta to set
   */
  public void setStepPartitioningMeta( StepPartitioningMeta stepPartitioningMeta ) {
    this.stepPartitioningMeta = stepPartitioningMeta;
  }

  /**
   * @return the clusterSchema
   */
  public ClusterSchema getClusterSchema() {
    return clusterSchema;
  }

  /**
   * @param clusterSchema
   *          the clusterSchema to set
   */
  public void setClusterSchema( ClusterSchema clusterSchema ) {
    this.clusterSchema = clusterSchema;
  }

  /**
   * @return the distributes
   */
  public boolean isDistributes() {
    return distributes;
  }

  /**
   * @param distributes
   *          the distributes to set
   */
  public void setDistributes( boolean distributes ) {
    if ( this.distributes != distributes ) {
      this.distributes = distributes;
      setChanged();
    }
  }

  /**
   * @return the StepErrorMeta error handling metadata for this step
   */
  public StepErrorMeta getStepErrorMeta() {
    return stepErrorMeta;
  }

  /**
   * @param stepErrorMeta
   *          the error handling metadata for this step
   */
  public void setStepErrorMeta( StepErrorMeta stepErrorMeta ) {
    this.stepErrorMeta = stepErrorMeta;
  }

  /**
   * Find a step with the ID in a given ArrayList of steps
   *
   * @param steps
   *          The List of steps to search
   * @param id
   *          The ID of the step
   * @return The step if it was found, null if nothing was found
   */
  public static final StepMeta findStep( List<StepMeta> steps, ObjectId id ) {
    if ( steps == null ) {
      return null;
    }

    for ( StepMeta stepMeta : steps ) {
      if ( stepMeta.getObjectId() != null && stepMeta.getObjectId().equals( id ) ) {
        return stepMeta;
      }
    }
    return null;
  }

  /**
   * Find a step with its name in a given ArrayList of steps
   *
   * @param steps
   *          The List of steps to search
   * @param stepname
   *          The name of the step
   * @return The step if it was found, null if nothing was found
   */
  public static final StepMeta findStep( List<StepMeta> steps, String stepname ) {
    if ( steps == null ) {
      return null;
    }

    // name comparison is case-insensitive, matching StepMeta.equals()
    for ( StepMeta stepMeta : steps ) {
      if ( stepMeta.getName().equalsIgnoreCase( stepname ) ) {
        return stepMeta;
      }
    }
    return null;
  }

  public boolean supportsErrorHandling() {
    return stepMetaInterface.supportsErrorHandling();
  }

  /**
   * @return if error handling is supported for this step, if error handling is defined and a target step is set
   */
  public boolean isDoingErrorHandling() {
    return stepMetaInterface.supportsErrorHandling() && stepErrorMeta != null
      && stepErrorMeta.getTargetStep() != null && stepErrorMeta.isEnabled();
  }

  public boolean isSendingErrorRowsToStep( StepMeta targetStep ) {
    return ( isDoingErrorHandling() && stepErrorMeta.getTargetStep().equals( targetStep ) );
  }

  /**
   * Support for CheckResultSourceInterface
   */
  public String getTypeId() {
    return this.getStepID();
  }

  public boolean isMapping() {
    return STRING_ID_MAPPING.equals( stepid );
  }

  public boolean isSingleThreader() {
    return STRING_ID_SINGLE_THREADER.equals( stepid );
  }

  public boolean isEtlMetaInject() {
    return STRING_ID_ETL_META_INJECT.equals( stepid );
  }

  public boolean isJobExecutor() {
    return STRING_ID_JOB_EXECUTOR.equals( stepid );
  }

  public boolean isMappingInput() {
    return STRING_ID_MAPPING_INPUT.equals( stepid );
  }

  public boolean isMappingOutput() {
    return STRING_ID_MAPPING_OUTPUT.equals( stepid );
  }

  /**
   * Get a list of all the resource dependencies that the step is depending on.
   *
   * @return a list of all the resource dependencies that the step is depending on
   */
  public List<ResourceReference> getResourceDependencies( TransMeta transMeta ) {
    return stepMetaInterface.getResourceDependencies( transMeta, this );
  }

  @Deprecated
  public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository ) throws KettleException {
    // NOTE(review): dereferences repository without a null check; a null repository
    // (allowed by the non-deprecated overload) would throw NullPointerException here.
    return exportResources( space, definitions, resourceNamingInterface, repository, repository.getMetaStore() );
  }

  @SuppressWarnings( "deprecation" )
  public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
    ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore )
    throws KettleException {

    // Compatibility with previous release...
    //
    String resources = stepMetaInterface.exportResources( space, definitions, resourceNamingInterface, repository );
    if ( resources != null ) {
      return resources;
    }

    // The step calls out to the StepMetaInterface...
    // These can in turn add anything to the map in terms of resources, etc.
    // Even reference files, etc. For now it's just XML probably...
    //
    return stepMetaInterface.exportResources( space, definitions, resourceNamingInterface, repository, metaStore );
  }

  /**
   * @return the remoteInputSteps
   */
  public List<RemoteStep> getRemoteInputSteps() {
    return remoteInputSteps;
  }

  /**
   * @param remoteInputSteps
   *          the remoteInputSteps to set
   */
  public void setRemoteInputSteps( List<RemoteStep> remoteInputSteps ) {
    this.remoteInputSteps = remoteInputSteps;
  }

  /**
   * @return the remoteOutputSteps
   */
  public List<RemoteStep> getRemoteOutputSteps() {
    return remoteOutputSteps;
  }

  /**
   * @param remoteOutputSteps
   *          the remoteOutputSteps to set
   */
  public void setRemoteOutputSteps( List<RemoteStep> remoteOutputSteps ) {
    this.remoteOutputSteps = remoteOutputSteps;
  }

  /**
   * @return the targetStepPartitioningMeta
   */
  public StepPartitioningMeta getTargetStepPartitioningMeta() {
    return targetStepPartitioningMeta;
  }

  /**
   * @param targetStepPartitioningMeta
   *          the targetStepPartitioningMeta to set
   */
  public void setTargetStepPartitioningMeta( StepPartitioningMeta targetStepPartitioningMeta ) {
    this.targetStepPartitioningMeta = targetStepPartitioningMeta;
  }

  /** A step repartitions when the target partitioning differs from its own. */
  public boolean isRepartitioning() {
    if ( !isPartitioned() && isTargetPartitioned() ) {
      return true;
    }
    if ( isPartitioned() && isTargetPartitioned() && !stepPartitioningMeta.equals( targetStepPartitioningMeta ) ) {
      return true;
    }
    return false;
  }

  public String getHolderType() {
    return "STEP";
  }

  public boolean isClustered() {
    return clusterSchema != null;
  }

  /**
   * Set the plugin step id (code)
   *
   * @param stepid
   */
  public void setStepID( String stepid ) {
    this.stepid = stepid;
  }

  public void setClusterSchemaName( String clusterSchemaName ) {
    this.clusterSchemaName = clusterSchemaName;
  }

  public void setParentTransMeta( TransMeta parentTransMeta ) {
    this.parentTransMeta = parentTransMeta;
  }

  public TransMeta getParentTransMeta() {
    return parentTransMeta;
  }

  public RowDistributionInterface getRowDistribution() {
    return rowDistribution;
  }

  /** Setting a non-null custom distribution also force-enables row distribution. */
  public void setRowDistribution( RowDistributionInterface rowDistribution ) {
    this.rowDistribution = rowDistribution;
    if ( rowDistribution != null ) {
      setDistributes( true );
    }
    setChanged( true );
  }

  /**
   * @return the copiesString
   */
  public String getCopiesString() {
    return copiesString;
  }

  /**
   * @param copiesString
   *          the copiesString to set
   */
  public void setCopiesString( String copiesString ) {
    this.copiesString = copiesString;
    copiesCache = null; // invalidate the cached numeric value
  }

  @Override
  public void setAttributesMap( Map<String, Map<String, String>> attributesMap ) {
    this.attributesMap = attributesMap;
  }

  @Override
  public Map<String, Map<String, String>> getAttributesMap() {
    return attributesMap;
  }

  @Override
  public void setAttribute( String groupName, String key, String value ) {
    // lazily create the group's map on first use
    Map<String, String> attributes = getAttributes( groupName );
    if ( attributes == null ) {
      attributes = new HashMap<String, String>();
      attributesMap.put( groupName, attributes );
    }
    attributes.put( key, value );
  }

  @Override
  public void setAttributes( String groupName, Map<String, String> attributes ) {
    attributesMap.put( groupName, attributes );
  }

  @Override
  public Map<String, String> getAttributes( String groupName ) {
    return attributesMap.get( groupName );
  }

  @Override
  public String getAttribute( String groupName, String key ) {
    Map<String, String> attributes = attributesMap.get( groupName );
    if ( attributes == null ) {
      return null;
    }
    return attributes.get( key );
  }

  /** True when the step's plugin could not be found and a MissingTrans placeholder was installed. */
  public boolean isMissing() {
    return this.stepMetaInterface instanceof MissingTrans;
  }
}
package org.sakaiproject.util;

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;

import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.sakaiproject.component.impl.SpringCompMgr;
import org.springframework.context.ConfigurableApplicationContext;

/**
 * Verifies behaviors of {@link ComponentsLoader}.
 *
 * @author dmccallum@unicon.net
 *
 */
public class ComponentsLoaderTest {

	/** the primary SUT */
	private ComponentsLoader loader;
	/** a helper for generating component dir layouts (and compiled code!) */
	private ComponentBuilder builder;
	/** Current ComponentsLoader impl obligates us to pass a SpringCompMgr,
	 * and we have to specify the app context explicitly b/c that field is
	 * normally set by init() which does _far_ too much work for our purposes.
	 */
	private SpringCompMgr componentMgr;

	@Before
	public void setUp() throws Exception {
		loader = new ComponentsLoader();
		builder = new ComponentBuilder();
		// Anonymous subclass + instance initializer: assign the app context directly,
		// bypassing init() (see the field comment above).
		componentMgr = new SpringCompMgr(null) {{
			m_ac = new SakaiApplicationContext();
		}};
	}

	@After
	public void tearDown() throws Exception {
		builder.tearDown();
		// Temporarily silence close-time noise while the partially-initialized
		// component manager shuts down, then restore the default.
		NoisierDefaultListableBeanFactory.noisyClose = false;
		componentMgr.close();
		NoisierDefaultListableBeanFactory.noisyClose = true;
	}

	/**
	 * Verifies that a single, dynamically generated Sakai component can
	 * be properly digested and registered with a given <code>ComponentManager</code>
	 * such that a bean whose implementation is known only to that component
	 * can be subsequently retrieved.
	 *
	 * <p>In reality, "registering with a given <code>ComponentManager</code>" actually
	 * means "registering with a given <code>ComponentManager's</code> underlying
	 * <code>ApplicationContext</code>". This particular test happens to use
	 * a "real" <code>ApplicationContext</code> instance for this purpose. This
	 * was deemed less fragile than mocking that interface since the mock would
	 * require knowledge of <code>BeanDefinitionReader</code> and
	 * <code>ApplicationContext</code> interactions, which are certainly out-of-scope
	 * for this test case. See <a href="http://xunitpatterns.com/Fragile%20Test.html#Overspecified%20Software">Overspecified Software</a></p>
	 *
	 * <p>Part of component registration typically involves making the component's
	 * classes visible to whatever <code>ClassLoader</code> materializes the component's
	 * Spring beans. Theoretically, then, just completing the load operation should be
	 * verification that a component-specific <code>ClassLoader</code> was used properly.
	 * However, because we cannot know (at least not given the current
	 * <code>ComponentsLoader</code> impl) whether or not a bean <code>ClassLoader</code>
	 * is specified when initializing the bean def reader, we assert on our ability
	 * to actually retrieve a bean from the <code>ApplicationContext</code> we passed
	 * (indirectly) to <code>load()</code>. We rely on the <code>ComponentBuilder</code>
	 * to guarantee that the retrieved bean's implementation can only be known to the
	 * component <code>ClassLoader</code>.</p>
	 *
	 * <p>Note that this test does not include verification that the component's
	 * <code>ClassLoader</code>'s parent is the <code>ComponentLoader's</code>
	 * <code>ClassLoader</code>. This is tested directly in more fine-grained BDD tests.</p>
	 *
	 */
	@Test
	public void testLoadRegistersComponentWithComponentManager() {
		if ( !(builder.isUseable()) ) {
			sayUnusableBuilder("testLoadRegistersComponentWithComponentManager()");
			return;
		}
		Component component = builder.buildComponent();
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		componentMgr.getApplicationContext().refresh();
		// we are not interested in testing SpringCompMgr, but we can assume the underlying
		// Spring context is a fully tested, known quantity. Hence the getBean() call
		// (also for reasons outlined in the javadoc)
		Assert.assertNotNull(componentMgr.getApplicationContext().getBean(component.getBeanId()));
	}

	/**
	 * This is very similar to the previous test except that now we check that we can also load components
	 * from JAR files within the lib folder.
	 */
	@Test
	public void testLoadRegisterJarComponentWithManager() {
		if ( !(builder.isUseable()) ) {
			// BUGFIX: previously reported the wrong (copy-pasted) test name in the skip diagnostic.
			sayUnusableBuilder("testLoadRegisterJarComponentWithManager()");
			return;
		}
		Component component = builder.buildComponent("test", "Jar1");
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		componentMgr.getApplicationContext().refresh();
		for (Component.Jar jar : component.getJars()) {
			Assert.assertNotNull(componentMgr.getApplicationContext().getBean(jar.getBeanId()));
		}
	}

	/**
	 * Same as {@link #testLoadRegistersComponentWithComponentManager()} but for
	 * several components. The intent here is to (hopefully) distinguish clearly
	 * between failures related to loading any given component and failures related
	 * to the algorithm for walking the entire root components dir.
	 */
	@Test
	public void testLoadRegistersMultipleComponentsWithComponentManager() {
		if ( !(builder.isUseable()) ) {
			sayUnusableBuilder("testLoadRegistersMultipleComponentsWithComponentManager()");
			return;
		}
		Component component1 = builder.buildComponent();
		Component component2 = builder.buildComponent();
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		componentMgr.getApplicationContext().refresh();
		Assert.assertNotNull(componentMgr.getApplicationContext().getBean(component1.getBeanId()));
		Assert.assertNotNull(componentMgr.getApplicationContext().getBean(component2.getBeanId()));
	}

	/**
	 * Verifies that the current thread's context class loader
	 * ({@link Thread#getContextClassLoader()}) is only temporarily
	 * replaced by the class loader returned from
	 * {@link ComponentsLoader#newPackageClassLoader(java.io.File)} when
	 * processing a call to
	 * {@link ComponentsLoader#loadComponentPackage(java.io.File, org.springframework.context.ConfigurableApplicationContext)}.
	 *
	 * <p>Unfortunately, given the current implementation, we cannot actually
	 * test that the loader returned from {@link ComponentsLoader#newPackageClassLoader(File)}
	 * is in fact ever assigned as the current context loader, but we have to
	 * assume the entire implementation is working properly if all the other
	 * tests in this class are passing,
	 * {@link #testLoadRegistersMultipleComponentsWithComponentManager()}
	 * in particular. This test is still necessary, though, to verify that
	 * the current thread is still in the expected state after components
	 * have been loaded.</p>
	 */
	@Test
	public void testSetsAndUnsetsPackageClassLoaderAsThreadContextClassLoader() {
		if ( !(builder.isUseable()) ) {
			sayUnusableBuilder("testSetsAndUnsetsPackageClassLoaderAsThreadContextClassLoader()");
			return;
		}
		builder.buildComponent();
		ClassLoader existingContextClassLoader = Thread.currentThread().getContextClassLoader();
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		Assert.assertSame("Should have preserved existing context class loader after components load completed",
				existingContextClassLoader, Thread.currentThread().getContextClassLoader());
	}

	/**
	 * Verifies that {@link ComponentsLoader#load(org.springframework.context.ConfigurableApplicationContext, String)}
	 * dispatches internally in the expected fashion. This enables more
	 * direct testing of special implementations of those delegated-to
	 * methods because it guarantees that the internal "protected"
	 * contract of that class is respected. For example, were this
	 * test to be deleted, one may override
	 * {@link ComponentsLoader#newPackageClassLoader(File)} only to
	 * be surprised when the override is never invoked, even if all
	 * other black box tests in this test case were to succeed.
	 */
	@Test
	public void testLoadDispatch() {
		if ( !(builder.isUseable()) ) {
			sayUnusableBuilder("testLoadDispatch()");
			return;
		}
		List<String> expectedJournal = new ArrayList<String>() {{
			add("validComponentsPackage");
			add("loadComponentPackage");
			add("newPackageClassLoader");
		}};
		final Component component = builder.buildComponent();
		final File expectedDir = new File(component.getDir());
		final List<String> journal = new ArrayList<String>();
		// a poor-man's mock, here
		loader = new ComponentsLoader() {
			protected boolean validComponentsPackage(File dir) {
				Assert.assertEquals(expectedDir, dir);
				journal.add("validComponentsPackage");
				return super.validComponentsPackage(dir);
			}
			protected ClassLoader newPackageClassLoader(File dir) {
				Assert.assertEquals(expectedDir, dir);
				journal.add("newPackageClassLoader");
				return super.newPackageClassLoader(dir);
			}
			protected void loadComponentPackage(File dir, ConfigurableApplicationContext ac) {
				Assert.assertEquals(expectedDir, dir);
				Assert.assertNotNull(ac);
				journal.add("loadComponentPackage");
				super.loadComponentPackage(dir, ac);
			}
		};
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		Assert.assertEquals("Did not invoke delegate methods in the expected order", expectedJournal, journal);
	}

	/**
	 * Similar to {@link #testLoadDispatch()} but verifies internal
	 * dispatch to protected methods from
	 * {@link ComponentsLoader#loadComponentPackage(File, ConfigurableApplicationContext)},
	 * which the former test is unable to validate directly.
	 */
	@Test
	public void testLoadComponentPackageDispatch() {
		if ( !(builder.isUseable()) ) {
			sayUnusableBuilder("testLoadComponentPackageDispatch()");
			return;
		}
		// overkill for our needs, but such is life with anon inner classes
		List<String> expectedJournal = new ArrayList<String>() {{
			add("newPackageClassLoader");
		}};
		final Component component = builder.buildComponent();
		final File expectedDir = new File(component.getDir());
		final List<String> journal = new ArrayList<String>();
		// a poor-man's mock, here
		loader = new ComponentsLoader() {
			protected ClassLoader newPackageClassLoader(File dir) {
				Assert.assertEquals(expectedDir, dir);
				journal.add("newPackageClassLoader");
				return super.newPackageClassLoader(dir);
			}
		};
		loader.loadComponentPackage(new File(component.getDir()), componentMgr.getApplicationContext());
		Assert.assertEquals("Did not invoke newPackageClassLoader()", expectedJournal, journal);
	}

	/**
	 * This test verifies that when the components folder is loaded the components are processed
	 * in an alphabetical order rather than the order in which they are returned from the filesystem.
	 * We want this so that we get repeatable loads of the component manager.
	 * This test depends on the filesystem order. If the filesystem always returns the files
	 * alphabetically it won't fail.
	 */
	@Test
	public void testComponentLoadOrder() {
		if ( !(builder.isUseable()) ) {
			// BUGFIX: previously reported "testLoadComponentPackageDispatch()" in the skip diagnostic.
			sayUnusableBuilder("testComponentLoadOrder()");
			return;
		}
		// Reverse alphabetical
		List<String> expectedJournal = new ArrayList<String>() {{
			add("sakai-z-pack");
			add("sakai-b-pack");
			add("sakai-a-pack");
		}};
		final Component componentA = builder.buildComponent("a");
		final Component componentZ = builder.buildComponent("z");
		final Component componentB = builder.buildComponent("b");
		final List<String> journal = new ArrayList<String>();
		loader = new ComponentsLoader() {
			protected ClassLoader newPackageClassLoader(File dir) {
				journal.add(dir.getName());
				return super.newPackageClassLoader(dir);
			}
		};
		try {
			// We reverse it so that we are more sure the correct code is getting run.
			System.setProperty("sakai.components.reverse.load", "true");
			loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		} finally {
			System.clearProperty("sakai.components.reverse.load");
		}
		Assert.assertEquals("The components didn't get sorted.", expectedJournal, journal);
	}

	/**
	 * This test verifies that when there are multiple JARs within a components folder the JARs are
	 * processed in alphabetical order. This test may not break as we might get the correct order back
	 * from the filesystem.
	 */
	@Test
	public void testJarLoadOrder() {
		if ( !(builder.isUseable()) ) {
			// BUGFIX: previously reported "testLoadComponentPackageDispatch()" in the skip diagnostic.
			sayUnusableBuilder("testJarLoadOrder()");
			return;
		}
		Component component = builder.buildComponent("jarloadorder", "Jar1", "Jar2", "Jar3");
		final List<String> expectedJournal = new ArrayList<String>() {{
			add("Jar1.jar");
			add("Jar2.jar");
			add("Jar3.jar");
		}};
		final Queue<String> journal = new LinkedList<String>();
		loader = new ComponentsLoader() {
			@Override
			protected ClassLoader newPackageClassLoader(File dir) {
				URLClassLoader classLoader = (URLClassLoader)super.newPackageClassLoader(dir);
				for (URL url : classLoader.getURLs()) {
					// When we have test components without classes folder this test can be simpler.
					if (url.getFile().endsWith(".jar")) {
						journal.add(url.getFile());
					}
				}
				return classLoader;
			}
		};
		loader.load(componentMgr.getApplicationContext(), builder.getComponentsRootDir().getAbsolutePath());
		for(String jar : expectedJournal) {
			Assert.assertTrue("Didn't find the expected jar at the correct position.", journal.poll().endsWith(jar));
		}
	}

	/**
	 * Prints a standard explanation for skipping the named test when the
	 * code-generation tooling behind {@link ComponentBuilder} is unavailable.
	 *
	 * @param invokingMethod the name of the test method being skipped
	 */
	private void sayUnusableBuilder(String invokingMethod) {
		System.out.println("Unable to execute " + invokingMethod +", probably b/c necessary code generation tools are not available. Please see http://maven.apache.org/general.html#tools-jar-dependency for information on making tools.jar visible in the Maven classpaths.");
	}
}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.infinispan.schematic.internal.document;

import java.text.ParseException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.regex.Pattern;
import org.infinispan.schematic.document.Array;
import org.infinispan.schematic.document.Binary;
import org.infinispan.schematic.document.Code;
import org.infinispan.schematic.document.CodeWithScope;
import org.infinispan.schematic.document.Document;
import org.infinispan.schematic.document.EditableArray;
import org.infinispan.schematic.document.EditableDocument;
import org.infinispan.schematic.document.MaxKey;
import org.infinispan.schematic.document.MinKey;
import org.infinispan.schematic.document.Null;
import org.infinispan.schematic.document.ObjectId;
import org.infinispan.schematic.document.Symbol;

/**
 * An {@link EditableDocument} implementation that wraps a {@link MutableDocument} and forwards reads to it,
 * while routing all writes through the overridable {@link #doSetValue}, {@link #doSetValueIfAbsent} and
 * {@link #doSetAllValues} hooks so that subclasses can intercept mutations. Nested documents and arrays
 * returned from getters are wrapped in editors via the {@link #editable} factory methods.
 */
public class DocumentEditor implements EditableDocument {

    /** The serialVersionUID */
    private static final long serialVersionUID = 1L;

    // The wrapped document that actually stores the fields.
    private final MutableDocument document;
    // Factory used to construct value objects (dates, codes, object IDs, etc.) for the set... methods.
    protected final DocumentValueFactory factory;

    /**
     * Create an editor for the supplied document, using the default value factory.
     *
     * @param document the document to be edited
     */
    public DocumentEditor( MutableDocument document ) {
        assert document != null;
        this.document = document;
        this.factory = DefaultDocumentValueFactory.INSTANCE;
    }

    /**
     * Create an editor for the supplied document.
     *
     * @param document the document to be edited
     * @param factory the factory that should be used to create value objects; may be null, in which case the
     *        default factory is used
     */
    public DocumentEditor( MutableDocument document,
                           DocumentValueFactory factory ) {
        assert document != null;
        this.document = document;
        this.factory = factory != null ? factory : DefaultDocumentValueFactory.INSTANCE;
    }

    @Override
    public DocumentEditor clone() {
        // Deep-clones the wrapped document; the resulting editor is independent of this one.
        return new DocumentEditor((MutableDocument)this.document.clone(), factory);
    }

    @Override
    public DocumentEditor with( Map<String, Object> changedFields ) {
        return new DocumentEditor((MutableDocument)this.document.with(changedFields), factory);
    }

    @Override
    public Document with( String fieldName,
                          Object value ) {
        return new DocumentEditor((MutableDocument)this.document.with(fieldName, value), factory);
    }

    @Override
    public DocumentEditor with( ValueTransformer transformer ) {
        return new DocumentEditor((MutableDocument)this.document.with(transformer), factory);
    }

    @Override
    public Document withVariablesReplaced( Properties properties ) {
        return new DocumentEditor((MutableDocument)this.document.withVariablesReplaced(properties), factory);
    }

    @Override
    public Document withVariablesReplacedWithSystemProperties() {
        return new DocumentEditor((MutableDocument)this.document.withVariablesReplacedWithSystemProperties(), factory);
    }

    @Override
    public Document unwrap() {
        return document;
    }

    /**
     * @return the wrapped document, typed as mutable
     */
    public MutableDocument asMutableDocument() {
        return document;
    }

    // ----- read accessors: straight delegation to the wrapped document -----

    @Override
    public Object get( String name ) {
        return document.get(name);
    }

    @Override
    public Boolean getBoolean( String name ) {
        return document.getBoolean(name);
    }

    @Override
    public boolean getBoolean( String name,
                               boolean defaultValue ) {
        return document.getBoolean(name, defaultValue);
    }

    /**
     * Set the value for the named field, routing through {@link #doSetValue}.
     *
     * @param name the field name
     * @param value the new value
     * @return the previous value, or null if there was none
     */
    public Object put( String name,
                       Object value ) {
        return doSetValue(name, value);
    }

    @Override
    public void putAll( Document object ) {
        doSetAllValues(object);
    }

    @Override
    public void putAll( Map<? extends String, ? extends Object> map ) {
        doSetAllValues(map);
    }

    /**
     * Recursively merge the fields of the other document into this one: document-valued fields on both
     * sides are merged field-by-field; everything else is set (overwriting) on this document.
     */
    @Override
    public void merge( Document other ) {
        if (other == this) return;
        for (Field field : other.fields()) {
            Document otherDoc = field.getValueAsDocument();
            if (!Null.matches(otherDoc)) {
                // Get the corresponding value in this document ...
                EditableDocument thisField = getDocument(field.getName());
                if (!Null.matches(thisField)) {
                    // There are docs in both sides, so merge them ...
                    thisField.merge(otherDoc);
                } else {
                    // There is not a document on this side (perhaps another value), so replace with that other doc ...
                    doSetValue(field.getName(), otherDoc);
                }
            } else {
                // The field is something other than a document, so just set it on this document ...
                doSetValue(field.getName(), field.getValue());
            }
        }
    }

    @Override
    public Object remove( String name ) {
        // NOTE(review): removal bypasses the doSetValue hook and goes straight to the document.
        return document.remove(name);
    }

    @Override
    public Integer getInteger( String name ) {
        return document.getInteger(name);
    }

    @Override
    public int getInteger( String name,
                           int defaultValue ) {
        return document.getInteger(name, defaultValue);
    }

    @Override
    public Long getLong( String name ) {
        return document.getLong(name);
    }

    @Override
    public long getLong( String name,
                         long defaultValue ) {
        return document.getLong(name, defaultValue);
    }

    @Override
    public Double getDouble( String name ) {
        return document.getDouble(name);
    }

    @Override
    public double getDouble( String name,
                             double defaultValue ) {
        return document.getDouble(name, defaultValue);
    }

    @Override
    public Number getNumber( String name ) {
        return document.getNumber(name);
    }

    @Override
    public Number getNumber( String name,
                             Number defaultValue ) {
        return document.getNumber(name, defaultValue);
    }

    @Override
    public String getString( String name ) {
        return document.getString(name);
    }

    @Override
    public String getString( String name,
                             String defaultValue ) {
        return document.getString(name, defaultValue);
    }

    @Override
    public EditableArray getArray( String name ) {
        // Wrap the raw array (if any) in an editor so mutations go through this editor's hooks.
        return editable(document.getArray(name), name);
    }

    @Override
    public EditableArray getOrCreateArray( String name ) {
        List<?> existing = document.getArray(name);
        return existing != null ? editable(existing, name) : setArray(name);
    }

    @Override
    public EditableDocument getDocument( String name ) {
        return editable(document.getDocument(name), name);
    }

    @Override
    public EditableDocument getOrCreateDocument( String name ) {
        Document existing = document.getDocument(name);
        return existing != null ? editable(existing, name) : setDocument(name);
    }

    @Override
    public boolean isNull( String name ) {
        return document.isNull(name);
    }

    @Override
    public boolean isNullOrMissing( String name ) {
        return document.isNullOrMissing(name);
    }

    @Override
    public MaxKey getMaxKey( String name ) {
        return document.getMaxKey(name);
    }

    @Override
    public MinKey getMinKey( String name ) {
        return document.getMinKey(name);
    }

    @Override
    public Code getCode( String name ) {
        return document.getCode(name);
    }

    @Override
    public CodeWithScope getCodeWithScope( String name ) {
        return document.getCodeWithScope(name);
    }

    @Override
    public ObjectId getObjectId( String name ) {
        return document.getObjectId(name);
    }

    @Override
    public Binary getBinary( String name ) {
        return document.getBinary(name);
    }

    @Override
    public Symbol getSymbol( String name ) {
        return document.getSymbol(name);
    }

    @Override
    public Pattern getPattern( String name ) {
        return document.getPattern(name);
    }

    @Override
    public UUID getUuid( String name ) {
        return document.getUuid(name);
    }

    @Override
    public UUID getUuid( String name,
                         UUID defaultValue ) {
        return document.getUuid(name, defaultValue);
    }

    @Override
    public int getType( String name ) {
        return document.getType(name);
    }

    @Override
    public Map<String, ? extends Object> toMap() {
        return document.toMap();
    }

    @Override
    public Iterable<Field> fields() {
        return document.fields();
    }

    @Override
    public boolean containsField( String name ) {
        return document.containsField(name);
    }

    @Override
    public boolean containsAll( Document document ) {
        return this.document.containsAll(document);
    }

    @Override
    public Set<String> keySet() {
        return document.keySet();
    }

    @Override
    public int size() {
        return document.size();
    }

    @Override
    public boolean isEmpty() {
        return document.isEmpty();
    }

    @Override
    public void removeAll() {
        document.removeAll();
    }

    // ----- fluent setters: each routes through doSetValue and returns this editor -----

    @Override
    public EditableDocument set( String name,
                                 Object value ) {
        doSetValue(name, value);
        return this;
    }

    @Override
    public EditableDocument setBoolean( String name,
                                        boolean value ) {
        doSetValue(name, factory.createBoolean(value));
        return this;
    }

    @Override
    public EditableDocument setNumber( String name,
                                       int value ) {
        doSetValue(name, factory.createInt(value));
        return this;
    }

    @Override
    public EditableDocument setNumber( String name,
                                       long value ) {
        doSetValue(name, factory.createLong(value));
        return this;
    }

    @Override
    public EditableDocument setNumber( String name,
                                       float value ) {
        // Floats are widened and stored as doubles.
        doSetValue(name, factory.createDouble(value));
        return this;
    }

    @Override
    public EditableDocument setNumber( String name,
                                       double value ) {
        doSetValue(name, factory.createDouble(value));
        return this;
    }

    @Override
    public EditableDocument setString( String name,
                                       String value ) {
        doSetValue(name, factory.createString(value));
        return this;
    }

    @Override
    public EditableDocument setSymbol( String name,
                                       String value ) {
        doSetValue(name, factory.createSymbol(value));
        return this;
    }

    @Override
    public EditableDocument setDocument( String name ) {
        BasicDocument doc = new BasicDocument();
        doSetValueIfAbsent(name, doc);
        return editable(doc, name);
    }

    @Override
    public EditableDocument setDocument( String name,
                                         Document document ) {
        // Unwrap an editor so the raw mutable document is stored, not the wrapper.
        if (document instanceof DocumentEditor) document = ((DocumentEditor)document).asMutableDocument();
        doSetValue(name, document);
        return editable(document, name);
    }

    @Override
    public EditableArray setArray( String name ) {
        List<?> array = new BasicArray();
        doSetValueIfAbsent(name, array);
        return editable(array, name);
    }

    @Override
    public EditableArray setArray( String name,
                                   Array array ) {
        // Unwrap an editor so the raw array is stored, not the wrapper.
        if (array instanceof ArrayEditor) array = ((ArrayEditor)array).unwrap();
        doSetValue(name, array);
        return editable((List<?>)array, name);
    }

    @Override
    public EditableArray setArray( String name,
                                   Object... values ) {
        List<?> array = new BasicArray(values);
        doSetValue(name, array);
        return editable(array, name);
    }

    @Override
    public EditableDocument setDate( String name,
                                     Date value ) {
        doSetValue(name, value);
        return this;
    }

    @Override
    public EditableDocument setDate( String name,
                                     String isoDate ) throws ParseException {
        doSetValue(name, factory.createDate(isoDate));
        return this;
    }

    @Override
    public EditableDocument setTimestamp( String name,
                                          int timeInSeconds,
                                          int increment ) {
        doSetValue(name, factory.createTimestamp(timeInSeconds, increment));
        return this;
    }

    @Override
    public EditableDocument setObjectId( String name,
                                         String hex ) {
        doSetValue(name, factory.createObjectId(hex));
        return this;
    }

    @Override
    public EditableDocument setObjectId( String name,
                                         byte[] bytes ) {
        doSetValue(name, factory.createObjectId(bytes));
        return this;
    }

    @Override
    public EditableDocument setObjectId( String name,
                                         int time,
                                         int machine,
                                         int process,
                                         int inc ) {
        doSetValue(name, factory.createObjectId(time, machine, process, inc));
        return this;
    }

    @Override
    public EditableDocument setRegularExpression( String name,
                                                  String pattern ) {
        doSetValue(name, factory.createRegex(pattern, null));
        return this;
    }

    @Override
    public EditableDocument setRegularExpression( String name,
                                                  String pattern,
                                                  int flags ) {
        doSetValue(name, factory.createRegex(pattern, BsonUtils.regexFlagsFor(flags)));
        return this;
    }

    @Override
    public EditableDocument setNull( String name ) {
        doSetValue(name, factory.createNull());
        return this;
    }

    @Override
    public EditableDocument setBinary( String name,
                                       byte type,
                                       byte[] data ) {
        doSetValue(name, factory.createBinary(type, data));
        return this;
    }

    @Override
    public EditableDocument setUuid( String name,
                                     UUID uuid ) {
        doSetValue(name, uuid);
        return this;
    }

    @Override
    public EditableDocument setCode( String name,
                                     String code,
                                     boolean includeScope ) {
        if (includeScope) {
            // Create a fresh, empty scope document and return an editor for it so the
            // caller can populate the scope.
            BasicDocument scope = new BasicDocument();
            doSetValue(name, factory.createCode(code, scope));
            return editable(scope, name);
        }
        doSetValue(name, factory.createCode(code));
        return this;
    }

    @Override
    public EditableDocument setCode( String name,
                                     String code,
                                     Document scope ) {
        if (scope != null) {
            doSetValue(name, factory.createCode(code, scope));
            return editable(scope, name);
        }
        doSetValue(name, factory.createCode(code));
        return this;
    }

    /**
     * The method that does the actual setting for all of the <code>set...</code> methods. This method may be overridden by
     * subclasses when additional work needs to be performed during the set operations.
     *
     * @param name the name of the field being set
     * @param value the new value; null is stored as the {@link Null} singleton, and editor wrappers are unwrapped
     * @return the old value, or null if there was no existing value
     */
    protected Object doSetValue( String name,
                                 Object value ) {
        if (value == null) {
            value = Null.getInstance();
        } else {
            value = Utility.unwrap(value);
        }
        return document.put(name, value);
    }

    /**
     * The method that does the actual setting for all of the <code>set...</code> methods. This method may be overridden by
     * subclasses when additional work needs to be performed during the set operations.
     *
     * NOTE(review): despite the name, this implementation is identical to {@link #doSetValue} -- it
     * unconditionally overwrites any existing value rather than putting only if absent. Confirm whether
     * put-if-absent semantics were intended before relying on either behavior.
     *
     * @param name the name of the field being set
     * @param value the new value
     * @return the old value, or null if there was no existing value
     */
    protected Object doSetValueIfAbsent( String name,
                                         Object value ) {
        if (value == null) {
            value = Null.getInstance();
        } else {
            value = Utility.unwrap(value);
        }
        return document.put(name, value);
    }

    /**
     * The method that does the actual setting for all of the {@link #putAll(Document)} method. This method may be overridden by
     * subclasses when additional work needs to be performed during this operation.
     *
     * @param values the document containing the fields to be added; ignored when null
     */
    protected void doSetAllValues( Document values ) {
        if (values != null) {
            values = Utility.unwrap(values);
            document.putAll(values);
        }
    }

    /**
     * The method that does the actual setting for all of the {@link #putAll(Map)} method. This method may be overridden by
     * subclasses when additional work needs to be performed during this operation.
     *
     * @param values the map containing the fields to be added; ignored when null
     */
    protected void doSetAllValues( Map<? extends String, ? extends Object> values ) {
        if (values != null) {
            document.putAll(Utility.unwrapValues(values));
        }
    }

    /**
     * Wrap a raw document (or array) value in the appropriate editor.
     *
     * @param doc the raw value; may be null, in which case null is returned
     * @param fieldName the name of the field holding the value
     * @return an editor over the value, or null
     */
    protected EditableDocument editable( Document doc,
                                         String fieldName ) {
        if (doc == null) return null;
        assert !(doc instanceof DocumentEditor) : "The document value should not be a DocumentEditor instance";
        if (doc instanceof MutableArray) {
            return createEditableArray((MutableArray)doc, fieldName, factory);
        }
        assert doc instanceof MutableDocument;
        return createEditableDocument((MutableDocument)doc, fieldName, factory);
    }

    /**
     * Wrap a raw array value in an array editor.
     *
     * @param array the raw array; may be null, in which case null is returned
     * @param fieldName the name of the field holding the array
     * @return an editor over the array, or null
     */
    protected EditableArray editable( List<?> array,
                                      String fieldName ) {
        if (array == null) return null;
        assert !(array instanceof ArrayEditor) : "The array value should not be an ArrayEditor instance";
        return createEditableArray((BasicArray)array, fieldName, factory);
    }

    /**
     * Factory hook for nested-document editors; subclasses may override to return specialized editors.
     */
    protected EditableDocument createEditableDocument( MutableDocument document,
                                                       String fieldName,
                                                       DocumentValueFactory factory ) {
        return new DocumentEditor(document, factory);
    }

    /**
     * Factory hook for nested-array editors; subclasses may override to return specialized editors.
     */
    protected EditableArray createEditableArray( MutableArray array,
                                                 String fieldName,
                                                 DocumentValueFactory factory ) {
        return new ArrayEditor(array, factory);
    }

    @Override
    public String toString() {
        return document.toString();
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.ss.formula;

import org.apache.poi.ss.formula.FormulaCellCache.IEntryOperation;
import org.apache.poi.ss.formula.FormulaUsedBlankCellSet.BookSheetKey;
import org.apache.poi.ss.formula.PlainCellCache.Loc;
import org.apache.poi.ss.formula.eval.BlankEval;
import org.apache.poi.ss.formula.eval.BoolEval;
import org.apache.poi.ss.formula.eval.ErrorEval;
import org.apache.poi.ss.formula.eval.NumberEval;
import org.apache.poi.ss.formula.eval.StringEval;
import org.apache.poi.ss.formula.eval.ValueEval;
import org.apache.poi.ss.usermodel.CellType;

/**
 * Performance optimisation for {@link org.apache.poi.ss.usermodel.FormulaEvaluator}.
 * This class stores previously calculated values of already visited cells,
 * to avoid unnecessary re-calculation when the same cells are referenced multiple times.
 *
 * Two caches are maintained: one for plain (non-formula) cell values keyed by location,
 * and one for formula cells keyed by the cell object itself.
 *
 * @author Josh Micich
 */
final class EvaluationCache {

	// Cache of plain (literal) cell values, keyed by (book, sheet, row, column) location.
	private final PlainCellCache _plainCellCache;
	// Cache of formula cell entries, keyed by the EvaluationCell instance.
	private final FormulaCellCache _formulaCellCache;
	/** only used for testing. <code>null</code> otherwise */
	final IEvaluationListener _evaluationListener;

	/* package */EvaluationCache(IEvaluationListener evaluationListener) {
		_evaluationListener = evaluationListener;
		_plainCellCache = new PlainCellCache();
		_formulaCellCache = new FormulaCellCache();
	}

	/**
	 * Updates the cached state for a cell whose content has just changed, handling all four
	 * transitions (formula->formula, plain->formula, formula->plain, plain->plain) and
	 * invalidating any cached formula results that depended on the old value.
	 * Note: the order of listener callbacks vs. cache mutations below is significant.
	 */
	public void notifyUpdateCell(int bookIndex, int sheetIndex, EvaluationCell cell) {
		FormulaCellCacheEntry fcce = _formulaCellCache.get(cell);

		int rowIndex = cell.getRowIndex();
		int columnIndex = cell.getColumnIndex();
		Loc loc = new Loc(bookIndex, sheetIndex, rowIndex, columnIndex);
		PlainValueCellCacheEntry pcce = _plainCellCache.get(loc);

		if (cell.getCellTypeEnum() == CellType.FORMULA) {
			if (fcce == null) {
				fcce = new FormulaCellCacheEntry();
				if (pcce == null) {
					// Cell was blank before (present in neither cache): formulas that
					// referenced the blank cell must be re-evaluated.
					if (_evaluationListener != null) {
						_evaluationListener.onChangeFromBlankValue(sheetIndex, rowIndex, columnIndex, cell, fcce);
					}
					updateAnyBlankReferencingFormulas(bookIndex, sheetIndex, rowIndex, columnIndex);
				}
				_formulaCellCache.put(cell, fcce);
			} else {
				// Existing formula cell: its cached result (and dependents') is now stale.
				fcce.recurseClearCachedFormulaResults(_evaluationListener);
				fcce.clearFormulaEntry();
			}
			if (pcce == null) {
				// was formula cell before - no change of type
			} else {
				// changing from plain cell to formula cell
				pcce.recurseClearCachedFormulaResults(_evaluationListener);
				_plainCellCache.remove(loc);
			}
		} else {
			ValueEval value = WorkbookEvaluator.getValueFromNonFormulaCell(cell);
			if (pcce == null) {
				if (value != BlankEval.instance) {
					// only cache non-blank values in the plain cell cache
					// (dependencies on blank cells are managed by
					// FormulaCellCacheEntry._usedBlankCellGroup)
					pcce = new PlainValueCellCacheEntry(value);
					if (fcce == null) {
						// Cell was blank before: notify formulas depending on blank cells.
						if (_evaluationListener != null) {
							_evaluationListener.onChangeFromBlankValue(sheetIndex, rowIndex, columnIndex, cell, pcce);
						}
						updateAnyBlankReferencingFormulas(bookIndex, sheetIndex, rowIndex, columnIndex);
					}
					_plainCellCache.put(loc, pcce);
				}
			} else {
				// Existing plain value: invalidate dependents only if the value actually changed.
				if (pcce.updateValue(value)) {
					pcce.recurseClearCachedFormulaResults(_evaluationListener);
				}
				if (value == BlankEval.instance) {
					// Blank values are never kept in the plain cache.
					_plainCellCache.remove(loc);
				}
			}
			if (fcce == null) {
				// was plain cell before - no change of type
			} else {
				// was formula cell before - now a plain value
				_formulaCellCache.remove(cell);
				fcce.setSensitiveInputCells(null);
				fcce.recurseClearCachedFormulaResults(_evaluationListener);
			}
		}
	}

	/**
	 * Notifies every cached formula entry that a previously-blank cell at the given
	 * location now has a value, so formulas that read the blank cell get invalidated.
	 */
	private void updateAnyBlankReferencingFormulas(int bookIndex, int sheetIndex, final int rowIndex,
			final int columnIndex) {
		final BookSheetKey bsk = new BookSheetKey(bookIndex, sheetIndex);
		_formulaCellCache.applyOperation(new IEntryOperation() {

			public void processEntry(FormulaCellCacheEntry entry) {
				entry.notifyUpdatedBlankCell(bsk, rowIndex, columnIndex, _evaluationListener);
			}
		});
	}

	/**
	 * Gets (creating on first access) the plain-value cache entry for the given location.
	 *
	 * @throws IllegalStateException if a cached entry exists but holds a different value
	 *         than {@code value} (sanity check; see TODO below)
	 */
	public PlainValueCellCacheEntry getPlainValueEntry(int bookIndex, int sheetIndex,
			int rowIndex, int columnIndex, ValueEval value) {

		Loc loc = new Loc(bookIndex, sheetIndex, rowIndex, columnIndex);
		PlainValueCellCacheEntry result = _plainCellCache.get(loc);
		if (result == null) {
			result = new PlainValueCellCacheEntry(value);
			_plainCellCache.put(loc, result);
			if (_evaluationListener != null) {
				_evaluationListener.onReadPlainValue(sheetIndex, rowIndex, columnIndex, result);
			}
		} else {
			// TODO - if we are confident that this sanity check is not required, we can remove 'value' from plain value cache entry
			if (!areValuesEqual(result.getValue(), value)) {
				throw new IllegalStateException("value changed");
			}
			if (_evaluationListener != null) {
				_evaluationListener.onCacheHit(sheetIndex, rowIndex, columnIndex, value);
			}
		}
		return result;
	}

	/**
	 * Value-equality comparison for cached evaluation results.
	 * Note: a null {@code a} always compares unequal (even to a null {@code b}).
	 *
	 * @throws IllegalStateException for value classes other than the known eval types
	 */
	private boolean areValuesEqual(ValueEval a, ValueEval b) {
		if (a == null) {
			return false;
		}
		Class<?> cls = a.getClass();
		if (cls != b.getClass()) {
			// value type is changing
			return false;
		}
		if (a == BlankEval.instance) {
			return b == a;
		}
		if (cls == NumberEval.class) {
			return ((NumberEval)a).getNumberValue() == ((NumberEval)b).getNumberValue();
		}
		if (cls == StringEval.class) {
			return ((StringEval)a).getStringValue().equals(((StringEval)b).getStringValue());
		}
		if (cls == BoolEval.class) {
			return ((BoolEval)a).getBooleanValue() == ((BoolEval)b).getBooleanValue();
		}
		if (cls == ErrorEval.class) {
			return ((ErrorEval)a).getErrorCode() == ((ErrorEval)b).getErrorCode();
		}
		throw new IllegalStateException("Unexpected value class (" + cls.getName() + ")");
	}

	/** Gets (creating on first access) the formula cache entry for the given cell. */
	public FormulaCellCacheEntry getOrCreateFormulaCellEntry(EvaluationCell cell) {
		FormulaCellCacheEntry result = _formulaCellCache.get(cell);
		if (result == null) {

			result = new FormulaCellCacheEntry();
			_formulaCellCache.put(cell, result);
		}
		return result;
	}

	/**
	 * Should be called whenever there are changes to input cells in the evaluated workbook.
	 */
	public void clear() {
		if(_evaluationListener != null) {
			_evaluationListener.onClearWholeCache();
		}
		_plainCellCache.clear();
		_formulaCellCache.clear();
	}

	/**
	 * Removes the cache entries for a deleted cell and invalidates any cached
	 * formula results that depended on it.
	 */
	public void notifyDeleteCell(int bookIndex, int sheetIndex, EvaluationCell cell) {

		if (cell.getCellTypeEnum() == CellType.FORMULA) {
			FormulaCellCacheEntry fcce = _formulaCellCache.remove(cell);
			if (fcce == null) {
				// formula cell has not been evaluated yet
			} else {
				fcce.setSensitiveInputCells(null);
				fcce.recurseClearCachedFormulaResults(_evaluationListener);
			}
		} else {
			Loc loc = new Loc(bookIndex, sheetIndex, cell.getRowIndex(), cell.getColumnIndex());
			PlainValueCellCacheEntry pcce = _plainCellCache.get(loc);

			if (pcce == null) {
				// cache entry doesn't exist. nothing to do
			} else {
				pcce.recurseClearCachedFormulaResults(_evaluationListener);
			}
		}
	}
}
//
// $
//

package jsky.app.ot.gemini.flamingos2;

import com.jgoodies.forms.factories.DefaultComponentFactory;
import edu.gemini.pot.sp.ISPObsComponent;
import edu.gemini.pot.sp.SPComponentType;
import edu.gemini.shared.gui.ThinBorder;
import edu.gemini.shared.gui.bean.*;
import edu.gemini.shared.util.immutable.None;
import edu.gemini.shared.util.immutable.Option;
import edu.gemini.spModel.core.Site;
import edu.gemini.spModel.data.YesNoType;
import edu.gemini.spModel.gemini.flamingos2.Flamingos2;
import edu.gemini.spModel.telescope.IssPort;
import edu.gemini.spModel.type.SpTypeUtil;
import jsky.app.ot.OTOptions;
import jsky.app.ot.editor.eng.EngEditor;
import jsky.app.ot.gemini.editor.ComponentEditor;
import jsky.app.ot.gemini.parallacticangle.PositionAnglePanel;

import javax.swing.*;
import javax.swing.border.BevelBorder;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyDescriptor;

/**
 * User interface and editor for Flamingos 2.
 *
 * (Work in progress: slated to replace EdCompInstFlamingos2, Flamingos2Form, and Flamingos2Form.jfd)
 */
public class Flamingos2Editor extends ComponentEditor<ISPObsComponent, Flamingos2> implements EngEditor {

    // Refreshes the parallactic-angle controls whenever a relevant instrument property changes.
    private final PropertyChangeListener updateParallacticAnglePCL = new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            posAnglePanel.updateParallacticControls();
        }
    };

    // Refreshes the unbounded-angle controls whenever a relevant instrument property changes.
    private final PropertyChangeListener updateUnboundedAnglePCL = new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            posAnglePanel.updateUnboundedControls();
        }
    };

    /**
     * Shows/hides the custom MDF and slit-width widgets depending on whether the
     * FPU is set to CUSTOM_MASK.
     */
    private final class CustomMdfEnabler implements PropertyChangeListener {
        public void propertyChange(PropertyChangeEvent evt) {
            update((Flamingos2) evt.getSource());
        }

        void update(Flamingos2 inst) {
            final boolean visible = inst.getFpu() == Flamingos2.FPUnit.CUSTOM_MASK;
            mdfLabel.setVisible(visible);
            mdfCtrl.getComponent().setVisible(visible);
            mdfCtrl.getComponent().setEnabled(visible);
            slitWidthLabel.setVisible(visible);
            customSlitCtrl.getComponent().setVisible(visible);
            customSlitCtrl.getComponent().setEnabled(visible);
        }
    }

    /**
     * Keeps the exposure-time warning label in sync with the current exposure time:
     * fatal color below the instrument minimum (or sub-second precision above
     * FRACTIONAL_EXP_TIME_MAX), warning color below the recommended time.
     */
    private final class ExposureTimeMessageUpdater implements EditListener<Flamingos2, Double>, PropertyChangeListener {
        private final JLabel label;

        ExposureTimeMessageUpdater(JLabel label) {
            this.label = label;
        }

        public void valueChanged(EditEvent<Flamingos2, Double> event) {
            update(event.getNewValue());
        }

        public void propertyChange(PropertyChangeEvent evt) {
            update();
        }

        // Re-reads the exposure time from the current data object.
        void update() {
            final Flamingos2 flam2 = getDataObject();
            update((flam2 == null) ? null : flam2.getExposureTime());
        }

        void update(Double val) {
            final Flamingos2 flam2 = getDataObject();

            Color fg = Color.black;
            String txt = "";
            if ((flam2 != null) && (val != null)) {
                final double min = flam2.getMinimumExposureTimeSecs();
                final double rec = flam2.getRecommendedExposureTimeSecs();
                if (val < min) {
                    fg = FATAL_FG_COLOR;
                    txt = String.format("Below minimum (%.1f sec).", min);
                } else if ((val > Flamingos2.FRACTIONAL_EXP_TIME_MAX) && (val != Math.floor(val))) {
                    // Non-integral exposure times are only allowed up to FRACTIONAL_EXP_TIME_MAX.
                    fg = FATAL_FG_COLOR;
                    txt = "Millisec precision not supported over " + Flamingos2.FRACTIONAL_EXP_TIME_MAX + " sec.";
                } else if (val < rec) {
                    fg = WARNING_FG_COLOR;
                    final String formatStr = (rec < Flamingos2.FRACTIONAL_EXP_TIME_MAX) ? "%.1f" : "%.0f";
                    txt = String.format("Below recommendation (" + formatStr + " sec).", rec);
                }
            }
            label.setText(txt);
            label.setForeground(fg);
        }
    }

    /**
     * Synchronizes the MOS pre-imaging checkbox with the data object, confirming with
     * the user before a selection that would reset the FPU and disperser.
     * Listeners are temporarily removed around programmatic updates to avoid feedback loops.
     */
    private class PreImagingChangeListener implements ItemListener, PropertyChangeListener {
        private static final String WARNING =
                "Selecting MOS pre-imaging will set the Focal Plane Unit to\n" +
                "'Imaging' and the Disperser to 'None'.";

        public void propertyChange(PropertyChangeEvent evt) {
            preImaging.removeItemListener(this);
            preImaging.setSelected(getDataObject().getMosPreimaging() == YesNoType.YES);
            preImaging.addItemListener(this);
        }

        public void itemStateChanged(ItemEvent e) {
            final JCheckBox cb = (JCheckBox) e.getSource();
            final Flamingos2 inst = getDataObject();

            final boolean selected = cb.isSelected();
            if (selected && ((inst.getFpu() != Flamingos2.FPUnit.FPU_NONE) ||
                             (inst.getDisperser() != Flamingos2.Disperser.NONE))) {
                // Setting pre-imaging will clobber FPU/disperser; ask first.
                final int res = JOptionPane.showOptionDialog(pan, WARNING, "Set MOS pre-imaging?",
                        JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null,
                        new String[] {"Set MOS pre-imaging", "Cancel"}, "Cancel");
                if (res != 0) {
                    // Cancelled: roll the checkbox back without re-triggering this listener.
                    preImaging.removeItemListener(this);
                    preImaging.setSelected(false);
                    preImaging.addItemListener(this);
                    return;
                }
            }
            inst.removePropertyChangeListener(this);
            inst.setMosPreimaging(selected ? YesNoType.YES : YesNoType.NO);
            inst.addPropertyChangeListener(this);
        }
    }

    /**
     * Read-only information panel summarizing reads, read noise, exposure-time limits,
     * pixel scale, and science field of view for the current configuration.
     */
    private final class MessagePanel extends JPanel implements PropertyChangeListener {
        private class PropValueLabel extends JLabel {
            PropValueLabel() {
                setForeground(Color.black);
            }
        }

        private final JLabel readsLabel = new PropValueLabel();
        private final JLabel readsWarning = new JLabel();
        private final JLabel readNoiseLabel = new PropValueLabel();
        private final JLabel recExposureTimeLabel = new PropValueLabel();
        private final JLabel minExposureTimeLabel = new PropValueLabel();

        private final JLabel scalePropertyLabel = new JLabel();
        private final JLabel scaleLabel = new PropValueLabel();
        private final JLabel scaleUnitsLabel = new JLabel();
        private final JLabel fovLabel = new PropValueLabel();
        private final JLabel fovUnitsLabel = new JLabel();
        private final JLabel modeLabel = new JLabel();

        MessagePanel() {
            super(new GridBagLayout());
            final Border b = new ThinBorder(BevelBorder.RAISED);
            setBorder(BorderFactory.createCompoundBorder(b, BorderFactory.createEmptyBorder(5, 15, 5, 5)));
            setBackground(INFO_BG_COLOR);

            int row = 0;
            readsWarning.setText(" *See Engineering Tab");
            readsWarning.setVisible(false);
            addProperty(row++, "Reads:", readsLabel, readsWarning);
            addProperty(row++, "Read Noise:", readNoiseLabel);
            addProperty(row++, "Exposure Time:", recExposureTimeLabel, new JLabel("sec (recommended)"),
                    minExposureTimeLabel, new JLabel("sec (min)"));

            final int sepRow = row++;
            add(new JSeparator(), new GridBagConstraints(){{
                gridy = sepRow; gridwidth = 3; weightx = 1.0; fill = HORIZONTAL;
                insets = new Insets(3, 0, 1, 0);
            }});

            addProperty(row++, scalePropertyLabel, scaleLabel, scaleUnitsLabel);
            addProperty(row, "Science FOV:", fovLabel, fovUnitsLabel, modeLabel);

            // Push all to the left.
            final GridBagConstraints gbc = new GridBagConstraints() {{
                gridx = 2; weightx = 1.0; fill = HORIZONTAL;
            }};
            add(new JPanel() {{setOpaque(false);}}, gbc);
        }

        private void addProperty(int row, String propertyNameLabel, JLabel... propertyValueLabels) {
            addProperty(row, new JLabel(propertyNameLabel), propertyValueLabels);
        }

        // Lays out one "name: value value ..." row in the panel's grid.
        private void addProperty(int row, JLabel propertyNameLabel, JLabel... propertyValueLabels) {
            final GridBagConstraints gbc = new GridBagConstraints();
            gbc.gridx = 0;
            gbc.gridy = row;
            gbc.fill = GridBagConstraints.NONE;
            gbc.anchor = GridBagConstraints.EAST;
            gbc.insets = new Insets(row == 0 ? 0 : 2, 0, 0, 0);
            add(propertyNameLabel, gbc);

            gbc.anchor = GridBagConstraints.WEST;
            gbc.insets = new Insets(row == 0 ? 0 : 2, 7, 0, 0);
            ++gbc.gridx;
            add(createPropertyValuePanel(propertyValueLabels), gbc);
        }

        // Packs the value labels for one row into a transparent sub-panel.
        private JPanel createPropertyValuePanel(JLabel... propertyValueLabels) {
            final JPanel pan = new JPanel(new GridBagLayout()) {{
                setOpaque(false);
            }};
            final GridBagConstraints gbc = new GridBagConstraints();
            gbc.gridx = 0;
            gbc.insets = new Insets(0, 0, 0, 0);
            pan.add(propertyValueLabels[0], gbc);
            for (int i=1; i<propertyValueLabels.length; ++i) {
                ++gbc.gridx;
                gbc.insets = new Insets(0, (i%2==1) ? 3 : 7, 0, 0);
                pan.add(propertyValueLabels[i], gbc);
            }
            return pan;
        }

        public void propertyChange(PropertyChangeEvent evt) {
            update();
        }

        // Recomputes every label from the current instrument state.
        void update() {
            final Flamingos2 inst = getDataObject();
            if (inst == null) return;

            final Flamingos2.ReadMode readMode = inst.getReadMode();

            // Read count: the engineering "reads" override (when present and different)
            // wins and is flagged with bold-italic text plus a warning label.
            int reads = readMode.readCount();
            boolean overridden = false;
            final Option<Flamingos2.Reads> engReads = inst.getReads();
            if (!None.instance().equals(engReads)) {
                if (reads != engReads.getValue().getCount()) {
                    overridden = true;
                    reads = engReads.getValue().getCount();
                }
            }
            readsLabel.setText(String.valueOf(reads));
            if (overridden) {
                readsLabel.setFont(readsLabel.getFont().deriveFont(Font.BOLD | Font.ITALIC));
            } else {
                readsLabel.setFont(readsLabel.getFont().deriveFont(Font.PLAIN));
            }
            readsWarning.setVisible(overridden);

            readNoiseLabel.setText(readMode.formatReadNoise());

            String time = String.format("> %.0f", readMode.recomendedExpTimeSec());
            recExposureTimeLabel.setText(time);
            time = String.format("%.1f", readMode.minimumExpTimeSec());
            minExposureTimeLabel.setText(time);

            // -------
            final Flamingos2.FPUnit fpu = inst.getFpu();

            int prec = 2;
            final String scaleProperty = "Pixel Scale";
            final double scale = inst.getLyotWheel().getPixelScale();
            String units = "arcsec/pixel";

            scalePropertyLabel.setText(scaleProperty);
            if (scale == 0.0) {
                scaleLabel.setText("na");
            } else {
                scaleLabel.setText(String.format("%."+prec+"f", scale));
            }
            scaleUnitsLabel.setText(units);

            final double[] sciArea = inst.getScienceArea();
            String sciAreaTxt = "na";
            units = "";
            if ((sciArea[0] > 0) || (sciArea[1] > 0)) {
                // Long-slit widths get sub-arcsec precision; other modes round to integers.
                prec = (fpu.isLongslit()) ? 2 : 0;
                sciAreaTxt = String.format("%."+prec+"f x %.0f", sciArea[0], sciArea[1]);
                units = "arcsec";
            }

            String mode = "";
            if (inst.getLyotWheel().getPlateScale() == 0) {
                mode = "(Focusing)";
            } else if (fpu == Flamingos2.FPUnit.FPU_NONE) {
                mode = "(Imaging)";
            } else if (fpu == Flamingos2.FPUnit.CUSTOM_MASK) {
                mode = "(Custom Mask)";
            } else if (fpu.isLongslit()) {
                mode = "(Spectroscopy)";
            }

            fovLabel.setText(sciAreaTxt);
            fovUnitsLabel.setText(units);
            modeLabel.setText(mode);

            // TODO: DO we need to do anything for parallactic angle here?
        }
    }

    // Main editor panel and the property controls it hosts.
    private final JPanel pan;

    private final ComboPropertyCtrl<Flamingos2, Flamingos2.FPUnit> fpuCtrl;
    private final JLabel mdfLabel;
    private final TextFieldPropertyCtrl<Flamingos2, String> mdfCtrl;
    private final JLabel slitWidthLabel;
    private final ComboPropertyCtrl<Flamingos2, Flamingos2.CustomSlitWidth> customSlitCtrl;
    private final JCheckBox preImaging;
    private final TextFieldPropertyCtrl<Flamingos2, Double> expTimeCtrl;
    private final ComboPropertyCtrl<Flamingos2, Flamingos2.Filter> filterCtrl;
    private final ComboPropertyCtrl<Flamingos2, Flamingos2.Disperser> disperserCtrl;
    private final ComboPropertyCtrl<Flamingos2, Flamingos2.LyotWheel> lyotCtrl;
    private final PositionAnglePanel<Flamingos2, Flamingos2Editor> posAnglePanel;

    private final CustomMdfEnabler customMdfEnabler = new CustomMdfEnabler();
    private final ExposureTimeMessageUpdater exposureTimeMessageUpdater;
    private final PreImagingChangeListener preImagingListener;

    private final RadioPropertyCtrl<Flamingos2, Flamingos2.ReadMode> readModeCtrl;
    private final RadioPropertyCtrl<Flamingos2, IssPort> portCtrl;
    private final MessagePanel messagePanel;

    // Engineering-tab controls (see getEngineeringComponent).
    private final ComboPropertyCtrl<Flamingos2, Option<Flamingos2.WindowCover>> windowCoverCtrl;
    private final ComboPropertyCtrl<Flamingos2, Flamingos2.Decker> deckerCtrl;
    private final ComboPropertyCtrl<Flamingos2, Option<Flamingos2.ReadoutMode>> readoutModeCtrl;
    private final ComboPropertyCtrl<Flamingos2, Option<Flamingos2.Reads>> readsCtrl;
    private final CheckboxPropertyCtrl<Flamingos2> eOffsetCtrl;

    // GridBag column layout for the main panel.
    private static final int leftLabelCol    = 0;
    private static final int leftWidgetCol   = 1;
    private static final int leftUnitsCol    = 2;
    private static final int leftGapCol      = 3;
    private static final int centerWidgetCol = 4;
    private static final int centerGapCol    = 5;
    private static final int rightLabelCol   = 6;
    private static final int rightWidgetCol  = 7;
    private static final int rightUnitsCol   = 8;

    private static final int leftWidth   = 3;
    private static final int centerWidth = 1;
    private static final int rightWidth  = 3;
    private static final int centerAndRightWidth = centerWidth + rightWidth + 1;
    private static final int colCount = rightUnitsCol + 1;

    /** Builds the editor panel and all of its property controls. */
    public Flamingos2Editor() {
        pan = new JPanel(new GridBagLayout());
        pan.setBorder(PANEL_BORDER);

        int row = 0;
        GridBagConstraints gbc;

        // Column gaps
        pan.add(new JPanel(), colGapGbc(leftGapCol, row));
        pan.add(new JPanel(), colGapGbc(centerGapCol, row));

        // FPU and mask: takes up two rows to accommodate the Custom MDF.
        fpuCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.FPU_PROP);
        addCtrl(pan, leftLabelCol, row, fpuCtrl, null);

        // FPU mask.
        PropertyDescriptor pd = Flamingos2.FPU_MASK_PROP;
        mdfCtrl = TextFieldPropertyCtrl.createStringInstance(pd);
        customSlitCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.CUSTOM_SLIT_WIDTH_PROP);

        mdfLabel = new JLabel("Custom MDF");
        pan.add(mdfLabel, propLabelGbc(leftLabelCol, row+1));
        gbc = propWidgetGbc(leftWidgetCol, row+1);
        pan.add(mdfCtrl.getComponent(), gbc);

        slitWidthLabel = new JLabel("Slit Width");
        pan.add(slitWidthLabel, propLabelGbc(leftLabelCol, row+2));
        gbc = propWidgetGbc(leftWidgetCol, row+2);
        pan.add(customSlitCtrl.getComponent(), gbc);

        // MOS pre-imaging: takes up two rows to accommodate the Custom MDF and exposure time warning.
        preImaging = new JCheckBox("MOS pre-imaging");
        preImagingListener = new PreImagingChangeListener();
        preImaging.addItemListener(preImagingListener);
        gbc = propWidgetGbc(centerWidgetCol, row);
        gbc.gridwidth = centerWidth;
        pan.add(preImaging, gbc);

        // Exposure Time
        pd = Flamingos2.EXPOSURE_TIME_PROP;
        final JLabel exposureTimeWarning = new JLabel("");
        exposureTimeMessageUpdater = new ExposureTimeMessageUpdater(exposureTimeWarning);
        expTimeCtrl = TextFieldPropertyCtrl.createDoubleInstance(pd, 1);
        expTimeCtrl.setColumns(6);
        expTimeCtrl.addEditListener(exposureTimeMessageUpdater);
        pan.add(new JLabel("Exp Time"), propLabelGbc(rightLabelCol, row));
        pan.add(expTimeCtrl.getComponent(), propWidgetGbc(rightWidgetCol, row));
        pan.add(new JLabel("sec"), propUnitsGbc(rightUnitsCol, row));
        pan.add(exposureTimeWarning, warningLabelGbc(rightLabelCol, row+1, rightWidth));

        // Increment the row by 3 since previous widgets were allotted for three rows.
        row += 3;

        // -------- SEPARATORS --------
        pan.add(new JSeparator(), separatorGbc(leftLabelCol, row, leftWidth));
        final JComponent posAngleSeparator = DefaultComponentFactory.getInstance().createSeparator("Position Angle");
        pan.add(posAngleSeparator, separatorGbc(centerWidgetCol, row, centerAndRightWidth));
        // ----------------------------
        ++row;

        filterCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.FILTER_PROP);
        addCtrl(pan, leftLabelCol, row, filterCtrl);

        // Position Angle
        posAnglePanel = PositionAnglePanel.apply(SPComponentType.INSTRUMENT_FLAMINGOS2);
        gbc = propWidgetGbc(centerWidgetCol, row, centerAndRightWidth, 2);
        gbc.anchor = GridBagConstraints.NORTHWEST;
        gbc.insets = new Insets(0, 5, 0, 0);
        pan.add(posAnglePanel.peer(), gbc);
        ++row;

        // REL-525: changed so that obsolete items are respected.
        // lyotCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.LYOT_WHEEL_PROP);
        lyotCtrl = new ComboPropertyCtrl(
                Flamingos2.LYOT_WHEEL_PROP,
                SpTypeUtil.getSelectableItems((Class<Flamingos2.LyotWheel>) Flamingos2.LYOT_WHEEL_PROP.getPropertyType()).toArray());
        addCtrl(pan, leftLabelCol, row, lyotCtrl);
        ++row;

        // Disperser falls into next row underneath.
        disperserCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.DISPERSER_PROP);
        gbc = propLabelGbc(leftLabelCol, row);
        pan.add(new JLabel(Flamingos2.DISPERSER_PROP.getDisplayName()), gbc);
        gbc = propWidgetGbc(leftWidgetCol, row);
        pan.add(disperserCtrl.getComponent(), gbc);
        ++row;

        final JTabbedPane tabPane = new JTabbedPane();
        readModeCtrl = new RadioPropertyCtrl<>(Flamingos2.READMODE_PROP);
        portCtrl = new RadioPropertyCtrl<>(Flamingos2.PORT_PROP);

        tabPane.addTab("Read Mode", getTabPanel(readModeCtrl.getComponent()));
        tabPane.addTab("ISS Port", getTabPanel(portCtrl.getComponent()));

        gbc = new GridBagConstraints();
        gbc.gridx = 0;
        gbc.gridy = row;
        gbc.gridwidth = colCount;
        gbc.gridheight = 1;
        gbc.weightx = 1.0;
        gbc.weighty = 0;
        gbc.fill = GridBagConstraints.HORIZONTAL;
        gbc.insets = new Insets(PROPERTY_ROW_GAP, 0, 0, 0);
        pan.add(tabPane, gbc);
        ++row;

        messagePanel = new MessagePanel();
        gbc.gridy = row;
        gbc.insets = new Insets(2, 0, 0, 0);
        pan.add(messagePanel, gbc);
        ++row;

        pan.add(new JPanel(), pushGbc(colCount, row));

        // Engineering controls
        windowCoverCtrl = ComboPropertyCtrl.optionEnumInstance(Flamingos2.WINDOW_COVER_PROP, Flamingos2.WindowCover.class);
        deckerCtrl = ComboPropertyCtrl.enumInstance(Flamingos2.DECKER_PROP);
        readoutModeCtrl = ComboPropertyCtrl.optionEnumInstance(Flamingos2.READOUT_MODE_PROP, Flamingos2.ReadoutMode.class);
        readsCtrl = ComboPropertyCtrl.optionEnumInstance(Flamingos2.READS_PROP, Flamingos2.Reads.class);
        eOffsetCtrl = new CheckboxPropertyCtrl<>(Flamingos2.USE_ELECTRONIC_OFFSETTING_PROP);
    }

    // Wraps a tab's content in a vertically-glued panel so it hugs the top.
    private static JPanel getTabPanel(JComponent comp) {
        final JPanel pan = new JPanel();
        pan.setBorder(TAB_PANEL_BORDER);
        pan.setLayout(new BoxLayout(pan, BoxLayout.PAGE_AXIS));
        pan.add(comp);
        pan.add(Box.createVerticalGlue());
        return pan;
    }

    /** Returns the main editor panel. */
    public JPanel getWindow() {
        return pan;
    }

    /** Builds and returns the engineering-tab panel (EngEditor contract). */
    public Component getEngineeringComponent() {
        final JPanel pan = new JPanel(new GridBagLayout());

        addCtrl(pan, 0, 0, deckerCtrl);
        addCtrl(pan, 0, 1, readoutModeCtrl);
        addCtrl(pan, 0, 2, readsCtrl);
        addCtrl(pan, 0, 3, windowCoverCtrl);

        final GridBagConstraints gbc = propWidgetGbc(0, 4);
        gbc.gridwidth = 2;
        pan.add(eOffsetCtrl.getComponent(), gbc);
        eOffsetCtrl.getComponent().setToolTipText("Use electronic offsetting when possible.");

        pan.add(new JPanel(), new GridBagConstraints(){{
            gridx=0; gridy=5; weighty=1.0; fill=VERTICAL;
        }});

        return pan;
    }

    /** Detaches all listeners from the outgoing data object before it is replaced. */
    @Override public void handlePreDataObjectUpdate(Flamingos2 inst) {
        if (inst == null) return;
        inst.removePropertyChangeListener(messagePanel);
        inst.removePropertyChangeListener(Flamingos2.READMODE_PROP.getName(), exposureTimeMessageUpdater);
        inst.removePropertyChangeListener(Flamingos2.FPU_PROP.getName(), customMdfEnabler);
        inst.removePropertyChangeListener(Flamingos2.MOS_PREIMAGING_PROP.getName(), preImagingListener);

        inst.removePropertyChangeListener(Flamingos2.POS_ANGLE_PROP.getName(), updateParallacticAnglePCL);
        inst.removePropertyChangeListener(Flamingos2.FPU_PROP.getName(),       updateParallacticAnglePCL);
        inst.removePropertyChangeListener(Flamingos2.DISPERSER_PROP.getName(), updateParallacticAnglePCL);

        inst.removePropertyChangeListener(Flamingos2.MOS_PREIMAGING_PROP.getName(), updateUnboundedAnglePCL);
        inst.removePropertyChangeListener(Flamingos2.FPU_PROP.getName(),            updateUnboundedAnglePCL);
    }

    /** Rebinds all controls and listeners to the incoming data object and refreshes the UI. */
    @Override public void handlePostDataObjectUpdate(Flamingos2 inst) {
        fpuCtrl.setBean(inst);
        mdfCtrl.setBean(inst);
        customSlitCtrl.setBean(inst);
        filterCtrl.setBean(inst);
        disperserCtrl.setBean(inst);
        lyotCtrl.setBean(inst);
        expTimeCtrl.setBean(inst);

        readModeCtrl.setBean(inst);
        portCtrl.setBean(inst);

        deckerCtrl.setBean(inst);
        readoutModeCtrl.setBean(inst);
        readsCtrl.setBean(inst);
        windowCoverCtrl.setBean(inst);
        eOffsetCtrl.setBean(inst);

        inst.addPropertyChangeListener(messagePanel);
        inst.addPropertyChangeListener(Flamingos2.FPU_PROP.getName(), customMdfEnabler);
        inst.addPropertyChangeListener(Flamingos2.READMODE_PROP.getName(), exposureTimeMessageUpdater);
        inst.addPropertyChangeListener(Flamingos2.MOS_PREIMAGING_PROP.getName(), preImagingListener);

        preImaging.setSelected(inst.getMosPreimaging() == YesNoType.YES);

        customMdfEnabler.update(inst);
        exposureTimeMessageUpdater.update();
        messagePanel.update();

        posAnglePanel.init(this, Site.GS);
        final boolean editable = OTOptions.areRootAndCurrentObsIfAnyEditable(getProgram(), getContextObservation());
        posAnglePanel.updateEnabledState(editable);

        // If the position angle mode or FPU mode properties change, force an update on the parallactic angle mode.
        inst.addPropertyChangeListener(Flamingos2.POS_ANGLE_PROP.getName(), updateParallacticAnglePCL);
        inst.addPropertyChangeListener(Flamingos2.FPU_PROP.getName(),       updateParallacticAnglePCL);
        inst.addPropertyChangeListener(Flamingos2.DISPERSER_PROP.getName(), updateParallacticAnglePCL);

        // If MOS preimaging or the FPU mode properties change, force an update on the unbounded angle mode.
        inst.addPropertyChangeListener(Flamingos2.MOS_PREIMAGING_PROP.getName(), updateUnboundedAnglePCL);
        inst.addPropertyChangeListener(Flamingos2.FPU_PROP.getName(),            updateUnboundedAnglePCL);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.near;

import java.io.Externalizable;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.CacheEntryPredicate;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheSharedContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.distributed.GridDistributedLockRequest;
import org.apache.ignite.internal.processors.cache.version.GridCacheVersion;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.MessageCollectionItemType;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.apache.ignite.transactions.TransactionIsolation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Near cache lock request to primary node. 'Near' means 'Initiating node' here, not 'Near Cache'.
 * <p>
 * Boolean options are packed into the single {@link #flags} byte (see the {@code *_FLAG_MASK}
 * constants) to keep the wire format compact.
 */
public class GridNearLockRequest extends GridDistributedLockRequest {
    /** */
    private static final long serialVersionUID = 0L;

    /** Flag bit: return value is required by the caller. */
    private static final int NEED_RETURN_VALUE_FLAG_MASK = 0x01;

    /** Flag bit: first lock request for the lock operation sent from a client node. */
    private static final int FIRST_CLIENT_REQ_FLAG_MASK = 0x02;

    /** Flag bit: synchronous commit. */
    private static final int SYNC_COMMIT_FLAG_MASK = 0x04;

    /** Flag bit: near cache is enabled on the originating node. */
    private static final int NEAR_CACHE_FLAG_MASK = 0x08;

    /** Topology version. */
    private AffinityTopologyVersion topVer;

    /** Mini future ID. */
    private int miniId;

    /** Filter. */
    private CacheEntryPredicate[] filter;

    /** Array of mapped DHT versions for this entry. */
    @GridToStringInclude
    private GridCacheVersion[] dhtVers;

    /** Subject ID. */
    private UUID subjId;

    /** Task name hash. */
    private int taskNameHash;

    /** TTL for create operation. */
    private long createTtl;

    /** TTL for read operation. */
    private long accessTtl;

    /** Packed boolean flags (see {@code *_FLAG_MASK} constants). */
    private byte flags;

    /** Transaction label. */
    private String txLbl;

    /**
     * Empty constructor required for {@link Externalizable}.
     */
    public GridNearLockRequest() {
        // No-op.
    }

    /**
     * @param cacheId Cache ID.
     * @param topVer Topology version.
     * @param nodeId Node ID.
     * @param threadId Thread ID.
     * @param futId Future ID.
     * @param lockVer Cache version.
     * @param isInTx {@code True} if implicit transaction lock.
     * @param isRead Indicates whether implicit lock is for read or write operation.
     * @param retVal Return value flag.
     * @param isolation Transaction isolation.
     * @param isInvalidate Invalidation flag.
     * @param timeout Lock timeout.
     * @param keyCnt Number of keys.
     * @param txSize Expected transaction size.
     * @param syncCommit Synchronous commit flag.
     * @param subjId Subject ID.
     * @param taskNameHash Task name hash code.
     * @param createTtl TTL for create operation.
     * @param accessTtl TTL for read operation.
     * @param skipStore Skip store flag.
     * @param keepBinary Keep binary flag.
     * @param firstClientReq {@code True} if first lock request for lock operation sent from client node.
     * @param nearCache {@code True} if near cache is enabled on the originating node.
     * @param addDepInfo Deployment info flag.
     * @param txLbl Transaction label.
     */
    public GridNearLockRequest(
        int cacheId,
        @NotNull AffinityTopologyVersion topVer,
        UUID nodeId,
        long threadId,
        IgniteUuid futId,
        GridCacheVersion lockVer,
        boolean isInTx,
        boolean isRead,
        boolean retVal,
        TransactionIsolation isolation,
        boolean isInvalidate,
        long timeout,
        int keyCnt,
        int txSize,
        boolean syncCommit,
        @Nullable UUID subjId,
        int taskNameHash,
        long createTtl,
        long accessTtl,
        boolean skipStore,
        boolean keepBinary,
        boolean firstClientReq,
        boolean nearCache,
        boolean addDepInfo,
        @Nullable String txLbl
    ) {
        super(
            cacheId,
            nodeId,
            lockVer,
            threadId,
            futId,
            lockVer,
            isInTx,
            isRead,
            isolation,
            isInvalidate,
            timeout,
            keyCnt,
            txSize,
            skipStore,
            keepBinary,
            addDepInfo);

        assert topVer.compareTo(AffinityTopologyVersion.ZERO) > 0;

        this.topVer = topVer;
        this.subjId = subjId;
        this.taskNameHash = taskNameHash;
        this.createTtl = createTtl;
        this.accessTtl = accessTtl;
        this.txLbl = txLbl;

        // One DHT version slot per key; filled in by addKeyBytes(..).
        dhtVers = new GridCacheVersion[keyCnt];

        setFlag(syncCommit, SYNC_COMMIT_FLAG_MASK);
        setFlag(firstClientReq, FIRST_CLIENT_REQ_FLAG_MASK);
        setFlag(retVal, NEED_RETURN_VALUE_FLAG_MASK);
        setFlag(nearCache, NEAR_CACHE_FLAG_MASK);
    }

    /**
     * @return {@code True} if near cache enabled on originating node.
     */
    public boolean nearCache() {
        return isFlag(NEAR_CACHE_FLAG_MASK);
    }

    /**
     * Sets flag mask.
     *
     * @param flag Set or clear.
     * @param mask Mask.
     */
    private void setFlag(boolean flag, int mask) {
        flags = flag ? (byte)(flags | mask) : (byte)(flags & ~mask);
    }

    /**
     * Reads flag mask.
     *
     * @param mask Mask to read.
     * @return Flag value.
     */
    private boolean isFlag(int mask) {
        return (flags & mask) != 0;
    }

    /**
     * @return {@code True} if first lock request for lock operation sent from client node.
     */
    public boolean firstClientRequest() {
        return isFlag(FIRST_CLIENT_REQ_FLAG_MASK);
    }

    /**
     * @return Topology version.
     */
    @Override public AffinityTopologyVersion topologyVersion() {
        return topVer;
    }

    /**
     * @return Subject ID.
     */
    public UUID subjectId() {
        return subjId;
    }

    /**
     * @return Task name hash.
     */
    public int taskNameHash() {
        return taskNameHash;
    }

    /**
     * @return Sync commit flag.
     */
    public boolean syncCommit() {
        return isFlag(SYNC_COMMIT_FLAG_MASK);
    }

    /**
     * @return Filter.
     */
    public CacheEntryPredicate[] filter() {
        return filter;
    }

    /**
     * @param filter Filter.
     * @param ctx Context.
     * @throws IgniteCheckedException If failed.
     */
    public void filter(CacheEntryPredicate[] filter, GridCacheContext ctx) throws IgniteCheckedException {
        this.filter = filter;
    }

    /**
     * @return Mini future ID.
     */
    public int miniId() {
        return miniId;
    }

    /**
     * @param miniId Mini future Id.
     */
    public void miniId(int miniId) {
        this.miniId = miniId;
    }

    /**
     * @return Need return value flag.
     */
    public boolean needReturnValue() {
        return isFlag(NEED_RETURN_VALUE_FLAG_MASK);
    }

    /**
     * Adds a key.
     *
     * @param key Key.
     * @param retVal Flag indicating whether value should be returned.
     * @param dhtVer DHT version.
     * @param ctx Context.
     * @throws IgniteCheckedException If failed.
     */
    public void addKeyBytes(
        KeyCacheObject key,
        boolean retVal,
        @Nullable GridCacheVersion dhtVer,
        GridCacheContext ctx
    ) throws IgniteCheckedException {
        // NOTE(review): 'idx' appears to be the key index maintained by the superclass
        // and advanced by super.addKeyBytes(..) — confirm against GridDistributedLockRequest.
        dhtVers[idx] = dhtVer;

        // Delegate to super.
        addKeyBytes(key, retVal, ctx);
    }

    /**
     * @param idx Index of the key.
     * @return DHT version for key at given index.
     */
    public GridCacheVersion dhtVersion(int idx) {
        return dhtVers[idx];
    }

    /**
     * @return New TTL to set after entry is created, -1 to leave unchanged.
     */
    public long createTtl() {
        return createTtl;
    }

    /**
     * @return TTL for read operation.
     */
    public long accessTtl() {
        return accessTtl;
    }

    /**
     * @return Transaction label.
     */
    @Nullable public String txLabel() {
        return txLbl;
    }

    /** {@inheritDoc} */
    @Override public void prepareMarshal(GridCacheSharedContext ctx) throws IgniteCheckedException {
        super.prepareMarshal(ctx);

        if (filter != null) {
            GridCacheContext cctx = ctx.cacheContext(cacheId);

            for (CacheEntryPredicate p : filter) {
                if (p != null)
                    p.prepareMarshal(cctx);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public void finishUnmarshal(GridCacheSharedContext ctx, ClassLoader ldr) throws IgniteCheckedException {
        super.finishUnmarshal(ctx, ldr);

        if (filter != null) {
            GridCacheContext cctx = ctx.cacheContext(cacheId);

            for (CacheEntryPredicate p : filter) {
                if (p != null)
                    p.finishUnmarshal(cctx, ldr);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
        writer.setBuffer(buf);

        if (!super.writeTo(buf, writer))
            return false;

        if (!writer.isHeaderWritten()) {
            if (!writer.writeHeader(directType(), fieldsCount()))
                return false;

            writer.onHeaderWritten();
        }

        // Intentional fall-through: writing resumes from the last incomplete state
        // when the buffer runs out of space mid-message. States 0-20 are written by super.
        switch (writer.state()) {
            case 21:
                if (!writer.writeLong("accessTtl", accessTtl))
                    return false;

                writer.incrementState();

            case 22:
                if (!writer.writeLong("createTtl", createTtl))
                    return false;

                writer.incrementState();

            case 23:
                if (!writer.writeObjectArray("dhtVers", dhtVers, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 24:
                if (!writer.writeObjectArray("filter", filter, MessageCollectionItemType.MSG))
                    return false;

                writer.incrementState();

            case 25:
                if (!writer.writeByte("flags", flags))
                    return false;

                writer.incrementState();

            case 26:
                if (!writer.writeInt("miniId", miniId))
                    return false;

                writer.incrementState();

            case 27:
                if (!writer.writeUuid("subjId", subjId))
                    return false;

                writer.incrementState();

            case 28:
                if (!writer.writeInt("taskNameHash", taskNameHash))
                    return false;

                writer.incrementState();

            case 29:
                if (!writer.writeAffinityTopologyVersion("topVer", topVer))
                    return false;

                writer.incrementState();

            case 30:
                if (!writer.writeString("txLbl", txLbl))
                    return false;

                writer.incrementState();

        }

        return true;
    }

    /** {@inheritDoc} */
    @Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
        reader.setBuffer(buf);

        if (!reader.beforeMessageRead())
            return false;

        if (!super.readFrom(buf, reader))
            return false;

        // Intentional fall-through: must mirror the field order of writeTo(..) exactly.
        switch (reader.state()) {
            case 21:
                accessTtl = reader.readLong("accessTtl");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 22:
                createTtl = reader.readLong("createTtl");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 23:
                dhtVers = reader.readObjectArray("dhtVers", MessageCollectionItemType.MSG, GridCacheVersion.class);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 24:
                filter = reader.readObjectArray("filter", MessageCollectionItemType.MSG, CacheEntryPredicate.class);

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 25:
                flags = reader.readByte("flags");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 26:
                miniId = reader.readInt("miniId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 27:
                subjId = reader.readUuid("subjId");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 28:
                taskNameHash = reader.readInt("taskNameHash");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 29:
                topVer = reader.readAffinityTopologyVersion("topVer");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

            case 30:
                txLbl = reader.readString("txLbl");

                if (!reader.isLastRead())
                    return false;

                reader.incrementState();

        }

        return reader.afterMessageRead(GridNearLockRequest.class);
    }

    /** {@inheritDoc} */
    @Override public short directType() {
        return 51;
    }

    /** {@inheritDoc} */
    @Override public byte fieldsCount() {
        return 31;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridNearLockRequest.class, this, "filter", Arrays.toString(filter),
            "super", super.toString());
    }
}
/******************************************************************************* * Copyright 2011, 2012 Chris Banes. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package sl.hr_client.utils.ui.photoview; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Matrix; import android.graphics.RectF; import android.graphics.drawable.Drawable; import android.net.Uri; import android.util.AttributeSet; import android.widget.ImageView; public class PhotoView extends ImageView implements IPhotoView { private final PhotoViewAttacher mAttacher; private ScaleType mPendingScaleType; public PhotoView(Context context) { this(context, null); } public PhotoView(Context context, AttributeSet attr) { this(context, attr, 0); } public PhotoView(Context context, AttributeSet attr, int defStyle) { super(context, attr, defStyle); super.setScaleType(ScaleType.MATRIX); mAttacher = new PhotoViewAttacher(this); if (null != mPendingScaleType) { setScaleType(mPendingScaleType); mPendingScaleType = null; } } @Override public void setPhotoViewRotation(float rotationDegree) { mAttacher.setPhotoViewRotation(rotationDegree); } @Override public boolean canZoom() { return mAttacher.canZoom(); } @Override public RectF getDisplayRect() { return mAttacher.getDisplayRect(); } @Override public Matrix getDisplayMatrix() { return mAttacher.getDrawMatrix(); } @Override public boolean setDisplayMatrix(Matrix 
finalRectangle) { return mAttacher.setDisplayMatrix(finalRectangle); } @Override @Deprecated public float getMinScale() { return getMinimumScale(); } @Override public float getMinimumScale() { return mAttacher.getMinimumScale(); } @Override @Deprecated public float getMidScale() { return getMediumScale(); } @Override public float getMediumScale() { return mAttacher.getMediumScale(); } @Override @Deprecated public float getMaxScale() { return getMaximumScale(); } @Override public float getMaximumScale() { return mAttacher.getMaximumScale(); } @Override public float getScale() { return mAttacher.getScale(); } @Override public ScaleType getScaleType() { return mAttacher.getScaleType(); } @Override public void setAllowParentInterceptOnEdge(boolean allow) { mAttacher.setAllowParentInterceptOnEdge(allow); } @Override @Deprecated public void setMinScale(float minScale) { setMinimumScale(minScale); } @Override public void setMinimumScale(float minimumScale) { mAttacher.setMinimumScale(minimumScale); } @Override @Deprecated public void setMidScale(float midScale) { setMediumScale(midScale); } @Override public void setMediumScale(float mediumScale) { mAttacher.setMediumScale(mediumScale); } @Override @Deprecated public void setMaxScale(float maxScale) { setMaximumScale(maxScale); } @Override public void setMaximumScale(float maximumScale) { mAttacher.setMaximumScale(maximumScale); } @Override // setImageBitmap calls through to this method public void setImageDrawable(Drawable drawable) { super.setImageDrawable(drawable); if (null != mAttacher) { mAttacher.update(); } } @Override public void setImageResource(int resId) { super.setImageResource(resId); if (null != mAttacher) { mAttacher.update(); } } @Override public void setImageURI(Uri uri) { super.setImageURI(uri); if (null != mAttacher) { mAttacher.update(); } } @Override public void setOnMatrixChangeListener(PhotoViewAttacher.OnMatrixChangedListener listener) { mAttacher.setOnMatrixChangeListener(listener); } @Override 
public void setOnLongClickListener(OnLongClickListener l) { mAttacher.setOnLongClickListener(l); } @Override public void setOnPhotoTapListener(PhotoViewAttacher.OnPhotoTapListener listener) { mAttacher.setOnPhotoTapListener(listener); } @Override public PhotoViewAttacher.OnPhotoTapListener getOnPhotoTapListener() { return mAttacher.getOnPhotoTapListener(); } @Override public void setOnViewTapListener(PhotoViewAttacher.OnViewTapListener listener) { mAttacher.setOnViewTapListener(listener); } @Override public PhotoViewAttacher.OnViewTapListener getOnViewTapListener() { return mAttacher.getOnViewTapListener(); } @Override public void setScale(float scale) { mAttacher.setScale(scale); } @Override public void setScale(float scale, boolean animate) { mAttacher.setScale(scale, animate); } @Override public void setScale(float scale, float focalX, float focalY, boolean animate) { mAttacher.setScale(scale, focalX, focalY, animate); } @Override public void setScaleType(ScaleType scaleType) { if (null != mAttacher) { mAttacher.setScaleType(scaleType); } else { mPendingScaleType = scaleType; } } @Override public void setZoomable(boolean zoomable) { mAttacher.setZoomable(zoomable); } @Override public Bitmap getVisibleRectangleBitmap() { return mAttacher.getVisibleRectangleBitmap(); } @Override public void setZoomTransitionDuration(int milliseconds) { mAttacher.setZoomTransitionDuration(milliseconds); } @Override protected void onDetachedFromWindow() { mAttacher.cleanup(); super.onDetachedFromWindow(); } }
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.ex;

import com.intellij.codeInsight.hint.EditorFragmentComponent;
import com.intellij.codeInsight.hint.EditorHintListener;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintManagerImpl;
import com.intellij.diff.fragments.DiffFragment;
import com.intellij.diff.util.DiffDrawUtil;
import com.intellij.diff.util.DiffUtil;
import com.intellij.diff.util.TextDiffType;
import com.intellij.ide.DataManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.LogicalPosition;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.EditorHighlighterFactory;
import com.intellij.openapi.editor.highlighter.FragmentedEditorHighlighter;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.TextRange;
import com.intellij.ui.*;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.border.Border;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.EventObject;
import java.util.List;

import static com.intellij.diff.util.DiffUtil.getDiffType;

/**
 * Popup panel shown next to a line-status (gutter) marker: a toolbar on top and an
 * optional embedded read-only editor fragment showing the VCS side of the change below.
 * Instances are created and shown via {@link #showPopupAt}.
 */
public class LineStatusMarkerPopupPanel extends JPanel {
  // Embedded editor fragment (VCS content preview); null when the popup is toolbar-only.
  @Nullable private final JComponent myEditorComponent;
  // The main (host) editor this popup is attached to.
  @NotNull private final Editor myEditor;

  private LineStatusMarkerPopupPanel(@NotNull Editor editor,
                                     @NotNull ActionToolbar toolbar,
                                     @Nullable JComponent editorComponent,
                                     @Nullable JComponent additionalInfo) {
    super(new BorderLayout());
    setOpaque(false);

    myEditor = editor;
    myEditorComponent = editorComponent;
    boolean isEditorVisible = myEditorComponent != null;

    JComponent toolbarComponent = toolbar.getComponent();
    toolbarComponent.setBorder(null);
    JComponent toolbarPanel = JBUI.Panels.simplePanel(toolbarComponent);
    // Bottom border line is omitted when the editor fragment is shown below (it brings its own top line).
    Border outsideToolbarBorder = JBUI.Borders.customLine(getBorderColor(), 1, 1, isEditorVisible ? 0 : 1, 1);
    Border insideToolbarBorder = JBUI.Borders.empty(1, 5);
    toolbarPanel.setBorder(BorderFactory.createCompoundBorder(outsideToolbarBorder, insideToolbarBorder));

    if (additionalInfo != null) {
      toolbarPanel.add(additionalInfo, BorderLayout.EAST);
    }

    // 'empty space' to the right of toolbar
    JPanel emptyPanel = new JPanel();
    emptyPanel.setOpaque(false);
    emptyPanel.setPreferredSize(new Dimension());

    JPanel topPanel = new JPanel(new BorderLayout());
    topPanel.setOpaque(false);
    topPanel.add(toolbarPanel, BorderLayout.WEST);
    topPanel.add(emptyPanel, BorderLayout.CENTER);

    add(topPanel, BorderLayout.NORTH);
    if (myEditorComponent != null) add(myEditorComponent, BorderLayout.CENTER);

    // transfer clicks into editor
    MouseAdapter listener = new MouseAdapter() {
      @Override
      public void mousePressed(MouseEvent e) {
        transferEvent(e, editor);
      }

      @Override
      public void mouseClicked(MouseEvent e) {
        transferEvent(e, editor);
      }

      @Override
      public void mouseReleased(MouseEvent e) {
        transferEvent(e, editor);
      }
    };
    emptyPanel.addMouseListener(listener);
  }

  /** @return The host editor this popup belongs to. */
  @NotNull
  public Editor getEditor() {
    return myEditor;
  }

  // Re-dispatches a mouse event from the popup's filler area into the editor's content component.
  private static void transferEvent(MouseEvent e, Editor editor) {
    editor.getContentComponent().dispatchEvent(SwingUtilities.convertMouseEvent(e.getComponent(), e, editor.getContentComponent()));
  }

  // Left inset of the embedded editor fragment; used to horizontally align popup text with the host editor.
  int getEditorTextOffset() {
    return EditorFragmentComponent.createEditorFragmentBorder(myEditor).getBorderInsets(myEditorComponent).left;
  }

  @Override
  public Dimension getPreferredSize() {
    // Clamp the popup to the screen, leaving room for a horizontal scrollbar when width is capped.
    int gap = JBUI.scale(10);
    Rectangle screenRectangle = ScreenUtil.getScreenRectangle(myEditor.getComponent());
    Rectangle maxSize = new Rectangle(screenRectangle.width - gap, screenRectangle.height - gap);

    Dimension size = super.getPreferredSize();
    if (size.width > maxSize.width) {
      size.width = maxSize.width;
      // Space for horizontal scrollbar
      size.height += JBUI.scale(20);
    }
    if (size.height > maxSize.height) {
      size.height = maxSize.height;
    }
    return size;
  }

  /**
   * Builds the popup panel and shows it as a lightweight editor hint near the caret line
   * (or aligned with {@code mousePosition} when given). {@code childDisposable} is disposed
   * when the hint closes.
   */
  public static void showPopupAt(@NotNull Editor editor,
                                 @NotNull ActionToolbar toolbar,
                                 @Nullable JComponent editorComponent,
                                 @Nullable JComponent additionalInfoPanel,
                                 @Nullable Point mousePosition,
                                 @NotNull Disposable childDisposable,
                                 @Nullable DataProvider dataProvider) {
    LineStatusMarkerPopupPanel popupPanel = new LineStatusMarkerPopupPanel(editor, toolbar, editorComponent, additionalInfoPanel);

    if (dataProvider != null) DataManager.registerDataProvider(popupPanel, dataProvider);
    toolbar.setTargetComponent(popupPanel);
    toolbar.updateActionsImmediately(); // we need valid ActionToolbar.getPreferredSize() to calc size of popup

    LightweightHint hint = new LightweightHint(popupPanel);
    HintListener closeListener = __ -> Disposer.dispose(childDisposable);
    hint.addHintListener(closeListener);

    int line = editor.getCaretModel().getLogicalPosition().line;
    Point point = HintManagerImpl.getHintPosition(hint, editor, new LogicalPosition(line, 0), HintManager.UNDER);
    if (mousePosition != null) { // show right after the nearest line
      int lineHeight = editor.getLineHeight();
      int delta = (point.y - mousePosition.y) % lineHeight;
      if (delta < 0) delta += lineHeight;
      point.y = mousePosition.y + delta;
    }
    point.x -= popupPanel.getEditorTextOffset(); // align main editor with the one in popup

    int flags = HintManager.HIDE_BY_CARET_MOVE | HintManager.HIDE_BY_TEXT_CHANGE | HintManager.HIDE_BY_SCROLLING;
    HintManagerImpl.getInstanceImpl().showEditorHint(hint, editor, point, flags, -1, false, new HintHint(editor, point));

    ApplicationManager.getApplication().getMessageBus().connect(childDisposable)
      .subscribe(EditorHintListener.TOPIC, (project, newHint, newHintFlags) -> {
        // Ex: if popup re-shown by ToggleByWordDiffAction
        if (newHint.getComponent() instanceof LineStatusMarkerPopupPanel) {
          LineStatusMarkerPopupPanel newPopupPanel = (LineStatusMarkerPopupPanel)newHint.getComponent();
          if (newPopupPanel.getEditor().equals(editor)) {
            hint.hide();
          }
        }
      });

    // showEditorHint may decline to show (e.g. editor not visible); make sure the disposable still runs.
    if (!hint.isVisible()) {
      closeListener.hintHidden(new EventObject(hint));
    }
  }

  /**
   * Creates a read-only, non-focusable text field rendering {@code content} with the host
   * editor's color scheme and tab settings.
   */
  @NotNull
  public static EditorTextField createTextField(@NotNull Editor editor, @NotNull String content) {
    EditorTextField field = new EditorTextField(content);
    field.setBorder(null);
    field.setOneLineMode(false);
    field.ensureWillComputePreferredSize();
    field.setFontInheritedFromLAF(false);

    field.addSettingsProvider(uEditor -> {
      uEditor.setVerticalScrollbarVisible(true);
      uEditor.setHorizontalScrollbarVisible(true);
      uEditor.setRendererMode(true);
      uEditor.setBorder(null);

      uEditor.setColorsScheme(editor.getColorsScheme());
      uEditor.setBackgroundColor(getEditorBackgroundColor(editor));

      uEditor.getSettings().setCaretRowShown(false);

      uEditor.getSettings().setTabSize(editor.getSettings().getTabSize(editor.getProject()));
      uEditor.getSettings().setUseTabCharacter(editor.getSettings().isUseTabCharacter(editor.getProject()));
    });

    // Expose the embedded editor as HOST_EDITOR so actions invoked inside the popup resolve it.
    DataManager.registerDataProvider(field, data -> {
      if (CommonDataKeys.HOST_EDITOR.is(data)) {
        return field.getEditor();
      }
      return null;
    });

    return field;
  }

  /** Wraps the text field in a bordered panel matching the editor's background. */
  @NotNull
  public static JComponent createEditorComponent(@NotNull Editor editor, @NotNull EditorTextField textField) {
    JPanel editorComponent = JBUI.Panels.simplePanel(textField);
    editorComponent.setBorder(createEditorFragmentBorder());
    editorComponent.setBackground(getEditorBackgroundColor(editor));
    return editorComponent;
  }

  /** Border for the embedded editor fragment: 1px line outside, 2px padding inside. */
  @NotNull
  public static Border createEditorFragmentBorder() {
    Border outsideEditorBorder = JBUI.Borders.customLine(getBorderColor(), 1);
    Border insideEditorBorder = JBUI.Borders.empty(2);
    return BorderFactory.createCompoundBorder(outsideEditorBorder, insideEditorBorder);
  }

  public static Color getEditorBackgroundColor(@NotNull Editor editor) {
    return EditorFragmentComponent.getBackgroundColor(editor, true);
  }

  /** Popup border color; light/dark theme aware. */
  @NotNull
  public static Color getBorderColor() {
    return new JBColor(Gray._206, Gray._75);
  }

  /**
   * Creates the popup toolbar and registers the actions' shortcuts on the editor component
   * for the lifetime of {@code parentDisposable}.
   */
  @NotNull
  public static ActionToolbar buildToolbar(@NotNull Editor editor,
                                           @NotNull List<AnAction> actions,
                                           @NotNull Disposable parentDisposable) {
    JComponent editorComponent = editor.getComponent();
    for (AnAction action : actions) {
      DiffUtil.registerAction(action, editorComponent);
    }
    Disposer.register(parentDisposable, () -> ActionUtil.getActions(editorComponent).removeAll(actions));

    ActionToolbar toolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.TOOLBAR, new DefaultActionGroup(actions), true);
    toolbar.setReservePlaceAutoPopupIcon(false);
    return toolbar;
  }

  /**
   * Applies syntax highlighting from the VCS document to the popup's text field, restricted
   * to {@code vcsTextRange}.
   */
  public static void installBaseEditorSyntaxHighlighters(@Nullable Project project,
                                                         @NotNull EditorTextField textField,
                                                         @NotNull Document vcsDocument,
                                                         TextRange vcsTextRange,
                                                         @NotNull FileType fileType) {
    EditorHighlighter highlighter = EditorHighlighterFactory.getInstance().createEditorHighlighter(project, fileType);
    highlighter.setText(vcsDocument.getImmutableCharSequence());
    FragmentedEditorHighlighter fragmentedHighlighter = new FragmentedEditorHighlighter(highlighter, vcsTextRange);
    textField.addSettingsProvider(uEditor -> uEditor.setHighlighter(fragmentedHighlighter));
  }

  /**
   * Highlights the 'before' (VCS) side of each word-level diff fragment inside the popup's
   * embedded editor. No-op when {@code wordDiff} is null.
   */
  public static void installPopupEditorWordHighlighters(@NotNull EditorTextField textField,
                                                        @Nullable List<? extends DiffFragment> wordDiff) {
    if (wordDiff == null) return;

    textField.addSettingsProvider(uEditor -> {
      for (DiffFragment fragment : wordDiff) {
        // Offsets "1" are the VCS (left) side of the fragment.
        int vcsStart = fragment.getStartOffset1();
        int vcsEnd = fragment.getEndOffset1();
        TextDiffType type = getDiffType(fragment);

        DiffDrawUtil.createInlineHighlighter(uEditor, vcsStart, vcsEnd, type);
      }
    });
  }

  /**
   * Highlights the 'after' (current) side of each word-level diff fragment in the host editor,
   * removing the highlighters when {@code parentDisposable} is disposed.
   */
  public static void installMasterEditorWordHighlighters(@NotNull Editor editor,
                                                         int currentStartOffset,
                                                         @NotNull List<? extends DiffFragment> wordDiff,
                                                         @NotNull Disposable parentDisposable) {
    final List<RangeHighlighter> highlighters = new ArrayList<>();
    for (DiffFragment fragment : wordDiff) {
      // Offsets "2" are the current (right) side, relative to the changed range's start.
      int currentStart = currentStartOffset + fragment.getStartOffset2();
      int currentEnd = currentStartOffset + fragment.getEndOffset2();
      TextDiffType type = getDiffType(fragment);

      highlighters.addAll(DiffDrawUtil.createInlineHighlighter(editor, currentStart, currentEnd, type));
    }

    Disposer.register(parentDisposable, () -> highlighters.forEach(RangeMarker::dispose));
  }
}
/*
 * reserved comment block
 * DO NOT REMOVE OR ALTER!
 */
/*
 * Copyright 2005 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.sun.org.apache.xerces.internal.impl.xs.traversers;

import com.sun.org.apache.xerces.internal.impl.xs.opti.SchemaDOMParser;
import com.sun.org.apache.xerces.internal.util.NamespaceSupport;
import com.sun.org.apache.xerces.internal.util.SAXLocatorWrapper;
import com.sun.org.apache.xerces.internal.util.SymbolTable;
import com.sun.org.apache.xerces.internal.util.XMLAttributesImpl;
import com.sun.org.apache.xerces.internal.util.XMLSymbols;
import com.sun.org.apache.xerces.internal.xni.NamespaceContext;
import com.sun.org.apache.xerces.internal.xni.QName;
import com.sun.org.apache.xerces.internal.xni.XMLString;
import com.sun.org.apache.xerces.internal.xni.XNIException;
import com.sun.org.apache.xerces.internal.xni.parser.XMLParseException;
import org.w3c.dom.Document;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.helpers.LocatorImpl;

/**
 * <p>SchemaContentHandler converts SAX events into XNI
 * and passes them directly to the SchemaDOMParser.</p>
 *
 * @xerces.internal
 *
 * @author Michael Glavassevich, IBM
 * @author Jack Z. Wang, IBM
 *
 */
final class SchemaContentHandler implements ContentHandler {

    /** Symbol table **/
    private SymbolTable fSymbolTable;

    /** SchemaDOMParser, events will be delegated to SchemaDOMParser to pass */
    private SchemaDOMParser fSchemaDOMParser;

    /** XML Locator wrapper for SAX. **/
    private final SAXLocatorWrapper fSAXLocatorWrapper = new SAXLocatorWrapper();

    /** The namespace context of this document: stores namespaces in scope */
    private NamespaceSupport fNamespaceContext = new NamespaceSupport();

    /** Indicate if push NamespaceContest is needed */
    private boolean fNeedPushNSContext;

    /** Flag used to track whether namespace declarations are reported as attributes. */
    private boolean fNamespacePrefixes = false;

    /** Flag used to track whether XML names and Namespace URIs have been internalized. */
    private boolean fStringsInternalized = false;

    /** Fields for start element, end element and characters. */
    // Reused across callbacks to avoid per-event allocation.
    private final QName fElementQName = new QName();
    private final QName fAttributeQName = new QName();
    private final XMLAttributesImpl fAttributes = new XMLAttributesImpl();
    private final XMLString fTempString = new XMLString();

    /**
     * <p>Constructs an SchemaContentHandler.</p>
     */
    public SchemaContentHandler() {}

    /**
     * @return The schema document built by the delegate SchemaDOMParser.
     */
    public Document getDocument() {
        return fSchemaDOMParser.getDocument();
    }

    /*
     * @see org.xml.sax.ContentHandler#setDocumentLocator(org.xml.sax.Locator)
     */
    public void setDocumentLocator(Locator locator) {
        fSAXLocatorWrapper.setLocator(locator);
    }

    /*
     * @see org.xml.sax.ContentHandler#startDocument()
     */
    public void startDocument() throws SAXException {
        fNeedPushNSContext = true;
        try {
            fSchemaDOMParser.startDocument(fSAXLocatorWrapper, null, fNamespaceContext, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#endDocument()
     */
    public void endDocument() throws SAXException {
        fSAXLocatorWrapper.setLocator(null);
        try {
            fSchemaDOMParser.endDocument(null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#startPrefixMapping(java.lang.String, java.lang.String)
     */
    public void startPrefixMapping(String prefix, String uri) throws SAXException {
        // Push a fresh namespace context only for the first mapping of the upcoming element;
        // startElement() sees fNeedPushNSContext == false and will not push again.
        if (fNeedPushNSContext) {
            fNeedPushNSContext = false;
            fNamespaceContext.pushContext();
        }
        if (!fStringsInternalized) {
            // Intern through the symbol table so identity comparisons (==) work downstream.
            prefix = (prefix != null) ? fSymbolTable.addSymbol(prefix) : XMLSymbols.EMPTY_STRING;
            uri = (uri != null && uri.length() > 0) ? fSymbolTable.addSymbol(uri) : null;
        }
        else {
            if (prefix == null) {
                prefix = XMLSymbols.EMPTY_STRING;
            }
            // Empty URI means "undeclare"; represented as null in XNI.
            if (uri != null && uri.length() == 0) {
                uri = null;
            }
        }
        fNamespaceContext.declarePrefix(prefix, uri);
    }

    /*
     * @see org.xml.sax.ContentHandler#endPrefixMapping(java.lang.String)
     */
    public void endPrefixMapping(String prefix) throws SAXException {
        // do nothing
    }

    /*
     * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
     */
    public void startElement(String uri, String localName, String qName, Attributes atts) throws SAXException {
        // Push only if startPrefixMapping() didn't already push for this element.
        if (fNeedPushNSContext) {
            fNamespaceContext.pushContext();
        }
        fNeedPushNSContext = true;

        // Fill element QName and XMLAttributes
        fillQName(fElementQName, uri, localName, qName);
        fillXMLAttributes(atts);

        // Add namespace declarations if necessary
        // (when the SAX source does not report xmlns attributes itself).
        if (!fNamespacePrefixes) {
            final int prefixCount = fNamespaceContext.getDeclaredPrefixCount();
            if (prefixCount > 0) {
                addNamespaceDeclarations(prefixCount);
            }
        }

        try {
            fSchemaDOMParser.startElement(fElementQName, fAttributes, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#endElement(java.lang.String, java.lang.String, java.lang.String)
     */
    public void endElement(String uri, String localName, String qName) throws SAXException {
        fillQName(fElementQName, uri, localName, qName);
        try {
            fSchemaDOMParser.endElement(fElementQName, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
        finally {
            // Always pop, even if the delegate threw, to keep the context balanced.
            fNamespaceContext.popContext();
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#characters(char[], int, int)
     */
    public void characters(char[] ch, int start, int length) throws SAXException {
        try {
            fTempString.setValues(ch, start, length);
            fSchemaDOMParser.characters(fTempString, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#ignorableWhitespace(char[], int, int)
     */
    public void ignorableWhitespace(char[] ch, int start, int length) throws SAXException {
        try {
            fTempString.setValues(ch, start, length);
            fSchemaDOMParser.ignorableWhitespace(fTempString, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#processingInstruction(java.lang.String, java.lang.String)
     */
    public void processingInstruction(String target, String data) throws SAXException {
        try {
            fTempString.setValues(data.toCharArray(), 0, data.length());
            fSchemaDOMParser.processingInstruction(target, fTempString, null);
        }
        catch (XMLParseException e) {
            convertToSAXParseException(e);
        }
        catch (XNIException e) {
            convertToSAXException(e);
        }
    }

    /*
     * @see org.xml.sax.ContentHandler#skippedEntity(java.lang.String)
     */
    public void skippedEntity(String arg) throws SAXException {
        // do-nothing
    }

    /*
     * Other methods
     */

    // Splits a raw XML name into (prefix, localpart, rawname, uri), interning
    // through the symbol table when the source strings are not already internalized.
    private void fillQName(QName toFill, String uri, String localpart, String rawname) {
        if (!fStringsInternalized) {
            uri = (uri != null && uri.length() > 0) ? fSymbolTable.addSymbol(uri) : null;
            localpart = (localpart != null) ? fSymbolTable.addSymbol(localpart) : XMLSymbols.EMPTY_STRING;
            rawname = (rawname != null) ? fSymbolTable.addSymbol(rawname) : XMLSymbols.EMPTY_STRING;
        }
        else {
            if (uri != null && uri.length() == 0) {
                uri = null;
            }
            if (localpart == null) {
                localpart = XMLSymbols.EMPTY_STRING;
            }
            if (rawname == null) {
                rawname = XMLSymbols.EMPTY_STRING;
            }
        }
        String prefix = XMLSymbols.EMPTY_STRING;
        int prefixIdx = rawname.indexOf(':');
        if (prefixIdx != -1) {
            prefix = fSymbolTable.addSymbol(rawname.substring(0, prefixIdx));
            // local part may be an empty string if this is a namespace declaration
            if (localpart == XMLSymbols.EMPTY_STRING) {
                localpart = fSymbolTable.addSymbol(rawname.substring(prefixIdx + 1));
            }
        }
        // local part may be an empty string if this is a namespace declaration
        else if (localpart == XMLSymbols.EMPTY_STRING) {
            localpart = rawname;
        }
        toFill.setValues(prefix, localpart, rawname, uri);
    }

    // Copies SAX Attributes into the reusable XNI fAttributes buffer.
    private void fillXMLAttributes(Attributes atts) {
        fAttributes.removeAllAttributes();
        final int attrCount = atts.getLength();
        for (int i = 0; i < attrCount; ++i) {
            fillQName(fAttributeQName, atts.getURI(i), atts.getLocalName(i), atts.getQName(i));
            String type = atts.getType(i);
            fAttributes.addAttributeNS(fAttributeQName, (type != null) ? type : XMLSymbols.fCDATASymbol, atts.getValue(i));
            fAttributes.setSpecified(i, true);
        }
    }

    // Synthesizes xmlns / xmlns:prefix attributes from the namespace context for
    // sources that do not report namespace declarations as attributes.
    private void addNamespaceDeclarations(final int prefixCount) {
        String prefix = null;
        String localpart = null;
        String rawname = null;
        String nsPrefix = null;
        String nsURI = null;
        for (int i = 0; i < prefixCount; ++i) {
            nsPrefix = fNamespaceContext.getDeclaredPrefixAt(i);
            nsURI = fNamespaceContext.getURI(nsPrefix);
            if (nsPrefix.length() > 0) {
                // xmlns:prefix="uri"
                prefix = XMLSymbols.PREFIX_XMLNS;
                localpart = nsPrefix;
                rawname = fSymbolTable.addSymbol(prefix + ":" + localpart);
            }
            else {
                // default declaration: xmlns="uri"
                prefix = XMLSymbols.EMPTY_STRING;
                localpart = XMLSymbols.PREFIX_XMLNS;
                rawname = XMLSymbols.PREFIX_XMLNS;
            }
            fAttributeQName.setValues(prefix, localpart, rawname, NamespaceContext.XMLNS_URI);
            fAttributes.addAttribute(fAttributeQName, XMLSymbols.fCDATASymbol, nsURI);
        }
    }

    /**
     * Re-initializes this handler for a new parse: sets the delegate parser, symbol
     * table, and the flags describing how the SAX source reports names/declarations.
     */
    public void reset(SchemaDOMParser schemaDOMParser, SymbolTable symbolTable,
            boolean namespacePrefixes, boolean stringsInternalized) {
        fSchemaDOMParser = schemaDOMParser;
        fSymbolTable = symbolTable;
        fNamespacePrefixes = namespacePrefixes;
        fStringsInternalized = stringsInternalized;
    }

    /*
     * Static methods
     */

    // Converts an XNI XMLParseException into a SAXParseException with location info,
    // unwrapping a nested SAXException when present.
    static void convertToSAXParseException(XMLParseException e) throws SAXException {
        Exception ex = e.getException();
        if (ex == null) {
            // must be a parser exception; mine it for locator info and throw
            // a SAXParseException
            LocatorImpl locatorImpl = new LocatorImpl();
            locatorImpl.setPublicId(e.getPublicId());
            locatorImpl.setSystemId(e.getExpandedSystemId());
            locatorImpl.setLineNumber(e.getLineNumber());
            locatorImpl.setColumnNumber(e.getColumnNumber());
            throw new SAXParseException(e.getMessage(), locatorImpl);
        }
        if (ex instanceof SAXException) {
            // why did we create an XMLParseException?
            throw (SAXException) ex;
        }
        throw new SAXException(ex);
    }

    // Converts a generic XNI exception into a SAXException, preserving the cause.
    static void convertToSAXException(XNIException e) throws SAXException {
        Exception ex = e.getException();
        if (ex == null) {
            throw new SAXException(e.getMessage());
        }
        if (ex instanceof SAXException) {
            throw (SAXException) ex;
        }
        throw new SAXException(ex);
    }

} // SchemaContentHandler
package br.cefetrj.sagitarii.teapot;

/**
 * Copyright 2015 Carlos Magno Abreu
 * magno.mabreu@gmail.com
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law  or agreed to in  writing,  software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import br.cefetrj.sagitarii.teapot.comm.FileUnity;

/**
 * A single activity activation (task) handed to this Teapot node by the
 * Sagitarii server. Carries the command to run, its workflow/experiment
 * coordinates, input data and attached files, and derives the filesystem
 * namespace the task executes in.
 *
 * <p>Natural ordering (see {@link #compareTo(Activation)}) is by
 * <em>descending</em> {@code order}, i.e. higher-order activations sort first.
 * Note: {@code equals}/{@code hashCode} are not overridden, so the ordering is
 * intentionally not consistent with equality — do not use instances as keys
 * in sorted sets/maps that rely on that consistency.
 */
public class Activation implements Comparable<Activation> {
	private int order;
	private String fragment;
	private String experiment;
	private String workflow;
	private String activitySerial;
	private String command;
	private String instanceSerial;
	// Random 8-character upper-case id assigned at construction time.
	private String taskId;
	private List<String> sourceData = new ArrayList<String>();
	private Activation previousActivation;
	private String xmlOriginalData;
	private String type;
	private String executor;
	private String executorType;
	private String targetTable;
	private List<FileUnity> files;
	private TaskStatus status = TaskStatus.QUEUED;
	private String wrappersFolder;

	/**
	 * Creates a new activation in {@code QUEUED} state with an empty file list
	 * and a freshly generated 8-character task id (first block of a random UUID,
	 * upper-cased).
	 */
	public Activation() {
		files = new ArrayList<FileUnity>();
		UUID uuid = UUID.randomUUID();
		taskId = uuid.toString().toUpperCase().substring(0, 8);
	}

	public void setWrappersFolder(String wrappersFolder) {
		this.wrappersFolder = wrappersFolder;
	}

	public String getWrappersFolder() {
		return wrappersFolder;
	}

	public TaskStatus getStatus() {
		return status;
	}

	public void setStatus(TaskStatus status) {
		this.status = status;
	}

	public String getTaskId() {
		return taskId;
	}

	/** Attaches an input/output file descriptor to this activation. */
	public void addFile(FileUnity file) {
		files.add(file);
	}

	public List<FileUnity> getFiles() {
		return files;
	}

	public String getExecutor() {
		return executor;
	}

	public void setExecutor(String executor) {
		this.executor = executor;
	}

	public String getExecutorType() {
		return executorType;
	}

	public void setExecutorType(String executorType) {
		this.executorType = executorType;
	}

	public String getType() {
		return type;
	}

	public void setType(String type) {
		this.type = type;
	}

	/**
	 * Resolves the root folder for this activation's experiment, anchored at
	 * the directory containing the running jar/classes:
	 * {@code <codeSourceDir>/namespaces/<workflow>/<experiment>}.
	 *
	 * @return absolute path (with {@code /} separators) to the experiment root
	 * @throws Exception if the code-source location cannot be converted to a URI
	 */
	public String getExperimentRootFolder() throws Exception {
		File f = new File(this.getClass().getProtectionDomain().getCodeSource().getLocation().toURI().getPath());
		String teapotRoot = f.getAbsolutePath();
		teapotRoot = teapotRoot.substring(0, teapotRoot.lastIndexOf(File.separator) + 1).replace(File.separator, "/");
		return teapotRoot + "namespaces" + "/" + workflow + "/" + experiment;
	}

	/**
	 * Builds the working namespace path
	 * {@code <experimentRoot>/<fragment>/<instanceSerial>/<executor>}.
	 * Best-effort: if the experiment root cannot be resolved, returns the
	 * empty string instead of propagating the exception.
	 */
	public String getNamespace() {
		String result = "";
		try {
			result = getExperimentRootFolder() + "/" + fragment + "/" + instanceSerial + "/" + executor;
		} catch (Exception ignored) {
			// Deliberate best-effort: callers treat "" as "namespace unavailable".
		}
		return result;
	}

	public int getOrder() {
		return order;
	}

	public void setOrder(int order) {
		this.order = order;
	}

	public String getFragment() {
		return fragment;
	}

	public void setFragment(String fragment) {
		this.fragment = fragment;
	}

	public String getActivitySerial() {
		return activitySerial;
	}

	public void setActivitySerial(String serial) {
		this.activitySerial = serial;
	}

	public String getCommand() {
		return command;
	}

	public void setCommand(String command) {
		this.command = command;
	}

	public String getInstanceSerial() {
		return instanceSerial;
	}

	public void setInstanceSerial(String instanceSerial) {
		this.instanceSerial = instanceSerial;
	}

	/**
	 * Orders activations by descending {@code order} (the argument's order is
	 * compared against this instance's, not the other way around).
	 */
	@Override
	public int compareTo(Activation pipe) {
		// Integer.compare avoids the boxing of the original
		// ((Integer) pipe.getOrder()).compareTo((Integer) order) while
		// returning the identical result.
		return Integer.compare(pipe.getOrder(), order);
	}

	public List<String> getSourceData() {
		return sourceData;
	}

	public void setSourceData(List<String> sourceData) {
		this.sourceData = sourceData;
	}

	public Activation getPreviousActivation() {
		return previousActivation;
	}

	public void setPreviousActivation(Activation previousActivation) {
		this.previousActivation = previousActivation;
	}

	public String getExperiment() {
		return experiment;
	}

	public void setExperiment(String experiment) {
		this.experiment = experiment;
	}

	public String getWorkflow() {
		return workflow;
	}

	public void setWorkflow(String workflow) {
		this.workflow = workflow;
	}

	public String getXmlOriginalData() {
		return xmlOriginalData;
	}

	public void setXmlOriginalData(String xmlOriginalData) {
		this.xmlOriginalData = xmlOriginalData;
	}

	public String getTargetTable() {
		return targetTable;
	}

	public void setTargetTable(String targetTable) {
		this.targetTable = targetTable;
	}
}
/*******************************************************************************
 * Copyright (c) 2016 - 2017
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *******************************************************************************/
package jsettlers.ai.army;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.Vector;

import jsettlers.ai.highlevel.AiStatistics;
import jsettlers.common.buildings.EBuildingType;
import jsettlers.common.material.EMaterialType;
import jsettlers.common.movable.EMovableType;
import jsettlers.common.movable.ESoldierType;
import jsettlers.common.position.ShortPoint2D;
import jsettlers.graphics.action.SetMaterialProductionAction.EMaterialProductionType;
import jsettlers.input.tasks.MoveToGuiTask;
import jsettlers.input.tasks.SetMaterialProductionGuiTask;
import jsettlers.input.tasks.UpgradeSoldiersGuiTask;
import jsettlers.logic.map.grid.movable.MovableGrid;
import jsettlers.logic.player.Player;
import jsettlers.network.client.interfaces.ITaskScheduler;

/**
 * This general is named winner because his attacks and defence should be very hard for human enemies. This should be realized by creating locally superiority. (You can kill 200 bowmen with just 100
 * bowmen if you fight 100 vs 20 in loops. This general should lay the focus on some swordsmen to occupy own towers, 20 spearmen to defeat rushes and the rest only bowmen because in mass this is the
 * strongest military unit. It upgrades bowmen first because this is the main unit and the 20 defeating spearmen defeats with lv1 as well. This general should store bows until level3 is reached to get
 * as many level3 bowmen as posibble. TODO: store bows until level3 is reached TODO: group soldiers in direction of enemy groups to defeat them TODO: group soldiers in direction of enemy groups to
 * attack them
 *
 * @author codingberlin
 */
public class ConfigurableGeneral implements ArmyGeneral {

	// Minimum effective (combat-strength weighted) attacker count before any attack is launched.
	private static final byte MIN_ATTACKER_COUNT = 20;
	// Target standing counts for the two infantry types (see levyUnits()).
	private static final byte MIN_SWORDSMEN_COUNT = 10;
	private static final byte MIN_PIKEMEN_COUNT = 20;
	// If an enemy fields more bowmen than this, our infantry is assumed to die on approach.
	private static final int BOWMEN_COUNT_OF_KILLING_INFANTRY = 300;
	// Full weapon production chain that must exist before attacking at all.
	private static final EBuildingType[] MIN_BUILDING_REQUIREMENTS_FOR_ATTACK = {
			EBuildingType.COALMINE,
			EBuildingType.IRONMINE,
			EBuildingType.IRONMELT,
			EBuildingType.WEAPONSMITH,
			EBuildingType.BARRACK };

	private final AiStatistics aiStatistics;
	private final Player player;
	private final ITaskScheduler taskScheduler;
	private final MovableGrid movableGrid;
	// Multiplier applied to our effective attacker count when deciding whether we outnumber the enemy.
	private float attackerCountFactor;

	/**
	 * @param attackerCountFactor
	 *            scales how superior our force must be before attacking (higher = more aggressive).
	 */
	public ConfigurableGeneral(AiStatistics aiStatistics, Player player, MovableGrid movableGrid, ITaskScheduler taskScheduler,
			float attackerCountFactor) {
		this.aiStatistics = aiStatistics;
		this.player = player;
		this.taskScheduler = taskScheduler;
		this.movableGrid = movableGrid;
		this.attackerCountFactor = attackerCountFactor;
	}

	/**
	 * Main per-tick decision: defend if enemies are inside our town, otherwise
	 * consider attacking the weakest living enemy.
	 *
	 * @param soldiersWithOrders
	 *            ids of soldiers that already received orders this tick; they are skipped when issuing move tasks.
	 */
	@Override
	public void commandTroops(Set<Integer> soldiersWithOrders) {
		Situation situation = calculateSituation(player.playerId);
		if (aiStatistics.getEnemiesInTownOf(player.playerId).size() > 0) {
			defend(situation, soldiersWithOrders);
		} else if (enemiesAreAlive()) {
			byte weakestEnemyId = getWeakestEnemy();
			Situation enemySituation = calculateSituation(weakestEnemyId);
			boolean infantryWouldDie = wouldInfantryDie(enemySituation);
			if (attackIsPossible(situation, enemySituation, infantryWouldDie)) {
				attack(situation, infantryWouldDie, soldiersWithOrders);
			}
		}
	}

	// An attack requires the full weapon production chain, a minimum effective
	// force, and numeric superiority (scaled by attackerCountFactor) over the enemy.
	// If infantry would die, only bowmen count toward our effective force.
	private boolean attackIsPossible(Situation situation, Situation enemySituation, boolean infantryWouldDie) {
		for (EBuildingType requiredType : MIN_BUILDING_REQUIREMENTS_FOR_ATTACK) {
			if (aiStatistics.getNumberOfBuildingTypeForPlayer(requiredType, player.playerId) < 1) {
				return false;
			}
		}
		float combatStrength = player.getCombatStrengthInformation().getCombatStrength(false);
		float effectiveAttackerCount;
		if (infantryWouldDie) {
			effectiveAttackerCount = situation.bowmenPositions.size() * combatStrength;
		} else {
			effectiveAttackerCount = situation.getSoldiersCount() * combatStrength;
		}
		return effectiveAttackerCount >= MIN_ATTACKER_COUNT
				&& effectiveAttackerCount * attackerCountFactor > enemySituation.getSoldiersCount();
	}

	// Heuristic: a huge enemy bowman force kills our infantry before it closes in.
	private boolean wouldInfantryDie(Situation enemySituation) {
		return enemySituation.bowmenPositions.size() > BOWMEN_COUNT_OF_KILLING_INFANTRY;
	}

	private boolean enemiesAreAlive() {
		for (byte enemyId : aiStatistics.getEnemiesOf(player.playerId)) {
			if (aiStatistics.isAlive(enemyId)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Upgrades soldiers (bowmen first, then pikemen, then swordsmen — at most one
	 * upgrade per call) and steers weapon production toward the target army
	 * composition: first top up swordsmen, then spearmen, then mass bowmen.
	 */
	@Override
	public void levyUnits() {
		// Chain: only try the next soldier type if the previous upgrade was not possible.
		if (!upgradeSoldiers(ESoldierType.BOWMAN))
			if (!upgradeSoldiers(ESoldierType.PIKEMAN))
				upgradeSoldiers(ESoldierType.SWORDSMAN);

		int missingSwordsmenCount = Math.max(0, MIN_SWORDSMEN_COUNT
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L1, player.playerId).size()
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L2, player.playerId).size()
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L3, player.playerId).size());
		int missingSpearmenCount = Math.max(0, MIN_PIKEMEN_COUNT
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L1, player.playerId).size()
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L2, player.playerId).size()
				- aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L3, player.playerId).size());
		int bowmenCount = aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L1, player.playerId).size()
				+ aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L2, player.playerId).size()
				+ aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L3, player.playerId).size();
		if (missingSwordsmenCount > 0) {
			// Produce exactly the missing swords as an absolute request.
			// NOTE(review): the SPEAR ratio is 1F here while SWORD's is 0F — looks
			// inconsistent with the other branches (which pair absolute requests with
			// ratio 0 for the requested material); confirm this is intended.
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SWORD, missingSwordsmenCount);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SPEAR, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.BOW, 0);
			setRatioOfMaterial(player.playerId, EMaterialType.SWORD, 0F);
			setRatioOfMaterial(player.playerId, EMaterialType.SPEAR, 1F);
			setRatioOfMaterial(player.playerId, EMaterialType.BOW, 0F);
		} else if (missingSpearmenCount > 0) {
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SWORD, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SPEAR, missingSpearmenCount);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.BOW, 0);
			setRatioOfMaterial(player.playerId, EMaterialType.SWORD, 0F);
			setRatioOfMaterial(player.playerId, EMaterialType.SPEAR, 0.3F);
			setRatioOfMaterial(player.playerId, EMaterialType.BOW, 1F);
		} else if (bowmenCount * player.getCombatStrengthInformation().getCombatStrength(false) < BOWMEN_COUNT_OF_KILLING_INFANTRY) {
			// Still building up the bowman mass: keep some spear production as a hedge.
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SWORD, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SPEAR, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.BOW, 0);
			setRatioOfMaterial(player.playerId, EMaterialType.SWORD, 0F);
			setRatioOfMaterial(player.playerId, EMaterialType.SPEAR, 0.3F);
			setRatioOfMaterial(player.playerId, EMaterialType.BOW, 1F);
		} else {
			// Army composition satisfied: produce bows only.
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SWORD, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.SPEAR, 0);
			setNumberOfFutureProducedMaterial(player.playerId, EMaterialType.BOW, 0);
			setRatioOfMaterial(player.playerId, EMaterialType.SWORD, 0F);
			setRatioOfMaterial(player.playerId, EMaterialType.SPEAR, 0F);
			setRatioOfMaterial(player.playerId, EMaterialType.BOW, 1F);
		}
	}

	// Schedules an absolute production request, but only when it differs from the
	// current request (avoids flooding the task queue with no-op tasks).
	private void setNumberOfFutureProducedMaterial(byte playerId, EMaterialType materialType, int numberToProduce) {
		if (aiStatistics.getMaterialProduction(playerId).getAbsoluteProductionRequest(materialType) != numberToProduce) {
			taskScheduler.scheduleTask(new SetMaterialProductionGuiTask(playerId, aiStatistics.getPositionOfPartition(playerId), materialType,
					EMaterialProductionType.SET_PRODUCTION, numberToProduce));
		}
	}

	// Schedules a relative production-ratio change, only when it actually changes.
	private void setRatioOfMaterial(byte playerId, EMaterialType materialType, float ratio) {
		if (aiStatistics.getMaterialProduction(playerId).getRelativeProductionRequest(materialType) != ratio) {
			taskScheduler.scheduleTask(new SetMaterialProductionGuiTask(playerId, aiStatistics.getPositionOfPartition(playerId), materialType,
					EMaterialProductionType.SET_RATIO, ratio));
		}
	}

	// @return true if an upgrade for the given soldier type was scheduled.
	private boolean upgradeSoldiers(ESoldierType type) {
		if (player.getMannaInformation().isUpgradePossible(type)) {
			taskScheduler.scheduleTask(new UpgradeSoldiersGuiTask(player.playerId, type));
			return true;
		}
		return false;
	}

	// Sends every soldier we have at the first enemy found inside our town.
	private void defend(Situation situation, Set<Integer> soldiersWithOrders) {
		List<ShortPoint2D> allMyTroops = new Vector<>();
		allMyTroops.addAll(situation.bowmenPositions);
		allMyTroops.addAll(situation.pikemenPositions);
		allMyTroops.addAll(situation.swordsmenPositions);
		sendTroopsTo(allMyTroops, aiStatistics.getEnemiesInTownOf(player.playerId).iterator().next(), soldiersWithOrders);
	}

	// Attacks the weakest enemy's nearest military-building door. If infantry
	// would die on approach, only bowmen are sent.
	private void attack(Situation situation, boolean infantryWouldDie, Set<Integer> soldiersWithOrders) {
		byte enemyId = getWeakestEnemy();
		ShortPoint2D targetDoor = getTargetEnemyDoorToAttack(enemyId);
		if (infantryWouldDie) {
			sendTroopsTo(situation.bowmenPositions, targetDoor, soldiersWithOrders);
		} else {
			List<ShortPoint2D> soldiers = new ArrayList<>(situation.bowmenPositions.size() + situation.pikemenPositions.size()
					+ situation.swordsmenPositions.size());
			soldiers.addAll(situation.bowmenPositions);
			soldiers.addAll(situation.pikemenPositions);
			soldiers.addAll(situation.swordsmenPositions);
			sendTroopsTo(soldiers, targetDoor, soldiersWithOrders);
		}
	}

	// Picks the living enemy with the fewest soldiers (all levels of all three types).
	private byte getWeakestEnemy() {
		byte weakestEnemyId = 0;
		int minAmountOfEnemyId = Integer.MAX_VALUE;
		for (byte enemyId : aiStatistics.getEnemiesOf(player.playerId)) {
			if (aiStatistics.isAlive(enemyId)) {
				int amountOfEnemyTroops = aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L1, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L2, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L3, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L1, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L2, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L3, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L1, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L2, enemyId).size();
				amountOfEnemyTroops += aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L3, enemyId).size();
				if (amountOfEnemyTroops < minAmountOfEnemyId) {
					minAmountOfEnemyId = amountOfEnemyTroops;
					weakestEnemyId = enemyId;
				}
			}
		}
		return weakestEnemyId;
	}

	// Issues one MoveToGuiTask for all soldiers at the given positions that do
	// not already have orders this tick.
	private void sendTroopsTo(List<ShortPoint2D> attackerPositions, ShortPoint2D target, Set<Integer> soldiersWithOrders) {
		List<Integer> attackerIds = new Vector<>();
		for (ShortPoint2D attackerPosition : attackerPositions) {
			int movableId = movableGrid.getMovableAt(attackerPosition.x, attackerPosition.y).getID();
			if (!soldiersWithOrders.contains(movableId)) {
				attackerIds.add(movableId);
			}
		}
		taskScheduler.scheduleTask(new MoveToGuiTask(player.playerId, target, attackerIds));
	}

	// Targets the door of the enemy military building nearest to the average
	// position of our own military buildings.
	private ShortPoint2D getTargetEnemyDoorToAttack(byte enemyToAttackId) {
		List<ShortPoint2D> myMilitaryBuildings = aiStatistics.getBuildingPositionsOfTypesForPlayer(EBuildingType.getMilitaryBuildings(),
				player.playerId);
		ShortPoint2D myBaseAveragePoint = aiStatistics.calculateAveragePointFromList(myMilitaryBuildings);
		List<ShortPoint2D> enemyMilitaryBuildings = aiStatistics.getBuildingPositionsOfTypesForPlayer(EBuildingType.getMilitaryBuildings(),
				enemyToAttackId);
		return aiStatistics.getBuildingAt(AiStatistics.detectNearestPointFromList(myBaseAveragePoint, enemyMilitaryBuildings)).getDoor();
	}

	// Snapshot of a player's soldier positions, grouped by unit type (all levels merged).
	private Situation calculateSituation(byte playerId) {
		Situation situation = new Situation();
		situation.swordsmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L1, playerId));
		situation.swordsmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L2, playerId));
		situation.swordsmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.SWORDSMAN_L3, playerId));
		situation.bowmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L1, playerId));
		situation.bowmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L2, playerId));
		situation.bowmenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.BOWMAN_L3, playerId));
		situation.pikemenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L1, playerId));
		situation.pikemenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L2, playerId));
		situation.pikemenPositions.addAll(aiStatistics.getMovablePositionsByTypeForPlayer(EMovableType.PIKEMAN_L3, playerId));
		return situation;
	}

	// Value holder for one player's soldier positions by type.
	private static class Situation {
		private final List<ShortPoint2D> swordsmenPositions = new Vector<>();
		private final List<ShortPoint2D> bowmenPositions = new Vector<>();
		private final List<ShortPoint2D> pikemenPositions = new Vector<>();

		int getSoldiersCount() {
			return swordsmenPositions.size() + bowmenPositions.size() + pikemenPositions.size();
		}
	}

	@Override
	public String toString() {
		return this.getClass().getName();
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.optimizer.plan;

import static org.apache.flink.optimizer.plan.PlanNode.SourceAndDamReport.FOUND_SOURCE;
import static org.apache.flink.optimizer.plan.PlanNode.SourceAndDamReport.FOUND_SOURCE_AND_DAM;
import static org.apache.flink.optimizer.plan.PlanNode.SourceAndDamReport.NOT_FOUND;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;

import org.apache.flink.api.common.operators.util.FieldList;
import org.apache.flink.api.common.typeutils.TypeComparatorFactory;
import org.apache.flink.optimizer.CompilerException;
import org.apache.flink.optimizer.dag.OptimizerNode;
import org.apache.flink.optimizer.dag.SingleInputNode;
import org.apache.flink.runtime.operators.DamBehavior;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.runtime.operators.shipping.ShipStrategyType;
import org.apache.flink.util.Visitor;

/**
 * A plan candidate for an operator with a single input. Holds the input channel,
 * the key fields and sort orders for each driver comparator required by the
 * chosen {@link DriverStrategy}, and the comparator factories themselves.
 */
public class SingleInputPlanNode extends PlanNode {

	// The one data input of this node.
	protected final Channel input;

	// Per-driver-comparator key fields; length == strategy.getNumRequiredComparators().
	protected final FieldList[] driverKeys;

	// Per-driver-comparator ascending/descending flags, parallel to driverKeys.
	protected final boolean[][] driverSortOrders;

	private TypeComparatorFactory<?>[] comparators;

	public Object postPassHelper;

	// --------------------------------------------------------------------------------------------

	public SingleInputPlanNode(OptimizerNode template, String nodeName, Channel input, DriverStrategy driverStrategy) {
		this(template, nodeName, input, driverStrategy, null, null);
	}

	public SingleInputPlanNode(OptimizerNode template, String nodeName, Channel input,
			DriverStrategy driverStrategy, FieldList driverKeyFields)
	{
		// Default the sort orders to all-ascending for the given key fields.
		this(template, nodeName, input, driverStrategy, driverKeyFields, getTrueArray(driverKeyFields.size()));
	}

	// NOTE(review): the `driverSortOrders` parameter shadows the field of the same
	// name; both it and `driverKeyFields` are installed only as comparator slot 0.
	public SingleInputPlanNode(OptimizerNode template, String nodeName, Channel input,
			DriverStrategy driverStrategy, FieldList driverKeyFields, boolean[] driverSortOrders)
	{
		super(template, nodeName, driverStrategy);
		this.input = input;

		// One comparator/key/sort-order slot per comparator the strategy requires.
		this.comparators = new TypeComparatorFactory<?>[driverStrategy.getNumRequiredComparators()];
		this.driverKeys = new FieldList[driverStrategy.getNumRequiredComparators()];
		this.driverSortOrders = new boolean[driverStrategy.getNumRequiredComparators()][];

		if(driverStrategy.getNumRequiredComparators() > 0) {
			this.driverKeys[0] = driverKeyFields;
			this.driverSortOrders[0] = driverSortOrders;
		}

		if (this.input.getShipStrategy() == ShipStrategyType.BROADCAST) {
			// A broadcast input is replicated to every parallel instance.
			this.input.setReplicationFactor(getParallelism());
		}

		// Inherit the predecessor's branch plan (open branches must be tracked
		// through the whole candidate chain).
		final PlanNode predNode = input.getSource();
		if (predNode.branchPlan != null && !predNode.branchPlan.isEmpty()) {
			if (this.branchPlan == null) {
				this.branchPlan = new HashMap<OptimizerNode, PlanNode>();
			}
			this.branchPlan.putAll(predNode.branchPlan);
		}
	}

	// --------------------------------------------------------------------------------------------

	public SingleInputNode getSingleInputNode() {
		if (this.template instanceof SingleInputNode) {
			return (SingleInputNode) this.template;
		} else {
			throw new RuntimeException();
		}
	}

	/**
	 * Gets the input channel to this node.
	 *
	 * @return The input channel to this node.
	 */
	public Channel getInput() {
		return this.input;
	}

	/**
	 * Gets the predecessor of this node, i.e. the source of the input channel.
	 *
	 * @return The predecessor of this node.
	 */
	public PlanNode getPredecessor() {
		return this.input.getSource();
	}

	/**
	 * Sets the key field indexes for the specified driver comparator.
	 *
	 * @param keys The key field indexes for the specified driver comparator.
	 * @param id The ID of the driver comparator.
	 */
	public void setDriverKeyInfo(FieldList keys, int id) {
		// All-ascending sort order by default.
		this.setDriverKeyInfo(keys, getTrueArray(keys.size()), id);
	}

	/**
	 * Sets the key field information for the specified driver comparator.
	 *
	 * @param keys The key field indexes for the specified driver comparator.
	 * @param sortOrder The key sort order for the specified driver comparator.
	 * @param id The ID of the driver comparator.
	 */
	public void setDriverKeyInfo(FieldList keys, boolean[] sortOrder, int id) {
		if(id < 0 || id >= driverKeys.length) {
			throw new CompilerException("Invalid id for driver key information. DriverStrategy requires only "
										+super.getDriverStrategy().getNumRequiredComparators()+" comparators.");
		}
		this.driverKeys[id] = keys;
		this.driverSortOrders[id] = sortOrder;
	}

	/**
	 * Gets the key field indexes for the specified driver comparator.
	 *
	 * @param id The id of the driver comparator for which the key field indexes are requested.
	 * @return The key field indexes of the specified driver comparator.
	 */
	public FieldList getKeys(int id) {
		return this.driverKeys[id];
	}

	/**
	 * Gets the sort order for the specified driver comparator.
	 *
	 * @param id The id of the driver comparator for which the sort order is requested.
	 * @return The sort order of the specified driver comparator.
	 */
	public boolean[] getSortOrders(int id) {
		return driverSortOrders[id];
	}

	/**
	 * Gets the specified comparator from this PlanNode.
	 *
	 * @param id The ID of the requested comparator.
	 *
	 * @return The specified comparator.
	 */
	public TypeComparatorFactory<?> getComparator(int id) {
		return comparators[id];
	}

	/**
	 * Sets the specified comparator for this PlanNode.
	 *
	 * @param comparator The comparator to set.
	 * @param id The ID of the comparator to set.
	 */
	public void setComparator(TypeComparatorFactory<?> comparator, int id) {
		this.comparators[id] = comparator;
	}

	// --------------------------------------------------------------------------------------------

	// Pre-order visit of this node, then its main input and all broadcast inputs.
	@Override
	public void accept(Visitor<PlanNode> visitor) {
		if (visitor.preVisit(this)) {
			this.input.getSource().accept(visitor);

			for (Channel broadcastInput : getBroadcastInputs()) {
				broadcastInput.getSource().accept(visitor);
			}

			visitor.postVisit(this);
		}
	}

	@Override
	public Iterable<PlanNode> getPredecessors() {
		if (getBroadcastInputs() == null || getBroadcastInputs().isEmpty()) {
			return Collections.singleton(this.input.getSource());
		}
		else {
			List<PlanNode> preds = new ArrayList<PlanNode>();
			preds.add(input.getSource());

			for (Channel c : getBroadcastInputs()) {
				preds.add(c.getSource());
			}

			return preds;
		}
	}

	@Override
	public Iterable<Channel> getInputs() {
		return Collections.singleton(this.input);
	}

	// Reports whether `source` is reachable below this node and, if so, whether
	// a pipeline-breaking dam lies on the path from `source` up to here.
	@Override
	public SourceAndDamReport hasDamOnPathDownTo(PlanNode source) {
		if (source == this) {
			return FOUND_SOURCE;
		}

		SourceAndDamReport res = this.input.getSource().hasDamOnPathDownTo(source);
		if (res == FOUND_SOURCE_AND_DAM) {
			return FOUND_SOURCE_AND_DAM;
		}
		else if (res == FOUND_SOURCE) {
			// Source found below: a dam exists if the local strategy, the temp
			// mode, or the driver strategy itself breaks the pipeline here.
			return (this.input.getLocalStrategy().dams() || this.input.getTempMode().breaksPipeline() ||
					getDriverStrategy().firstDam() == DamBehavior.FULL_DAM) ?
					FOUND_SOURCE_AND_DAM : FOUND_SOURCE;
		}
		else {
			// NOT_FOUND
			// check the broadcast inputs

			for (NamedChannel nc : getBroadcastInputs()) {
				SourceAndDamReport bcRes = nc.getSource().hasDamOnPathDownTo(source);
				if (bcRes != NOT_FOUND) {
					// broadcast inputs are always dams
					return FOUND_SOURCE_AND_DAM;
				}
			}
			return NOT_FOUND;
		}
	}

	// --------------------------------------------------------------------------------------------

	// Builds a boolean array of the given length with every entry set to true
	// (default ascending sort orders).
	protected static boolean[] getTrueArray(int length) {
		final boolean[] a = new boolean[length];
		for (int i = 0; i < length; i++) {
			a[i] = true;
		}
		return a;
	}
}
/* ssdeep
   Copyright (C) 2006 ManTech International Corporation

   $Id: fuzzy.c 97 2010-03-19 15:10:06Z jessekornblum $

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA

   The code in this file, and this file only, is based on SpamSum, part
   of the Samba project: http://www.samba.org/ftp/unpacked/junkcode/spamsum/

   Because of where this file came from, any program that contains it
   must be licensed under the terms of the General Public License (GPL).
   See the file COPYING for details. The author's original comments
   about licensing are below:

   this is a checksum routine that is specifically designed for spam.
   Copyright Andrew Tridgell <tridge@samba.org> 2002

   This code is released under the GNU General Public License version 2
   or later.  Alteratively, you may also use this code under the terms
   of the Perl Artistic license.

   If you wish to distribute this code under the terms of a different
   free software license then please ask me. If there is a good reason
   then I will probably say yes.
*/
package eu.scape_project.bitwiser.utils;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;

import org.apache.commons.lang.StringUtils;

/**
 * SSDeep
 *
 * <p>
 * A Java version of the ssdeep algorithm, based on the fuzzy.c source
 * code, taken from version 2.6 of the ssdeep package.
 *
 * <p>
 * Transliteration/port to Java from C by...
 *
 * @author Andrew Jackson <Andrew.Jackson@bl.uk>
 *
 */
public class SSDeep {

    /// Length of an individual fuzzy hash signature component
    public static final int SPAMSUM_LENGTH = 64;

    /// The longest possible length for a fuzzy hash signature (without the filename)
    public static final int FUZZY_MAX_RESULT = (SPAMSUM_LENGTH + (SPAMSUM_LENGTH/2 + 20));

    public static final int MIN_BLOCKSIZE = 3;
    public static final int ROLLING_WINDOW = 7;

    public static final int HASH_PRIME = 0x01000193;
    public static final int HASH_INIT = 0x28021967;

    // Our input buffer when reading files to hash
    public static final int BUFFER_SIZE = 8192;

    // Mutable state of the rolling hash: a circular window of the last
    // ROLLING_WINDOW bytes plus the three partial hash accumulators.
    static class roll_state_class {
        int[] window = new int[ROLLING_WINDOW];
        int h1, h2, h3;
        int n;
    }
    // NOTE(review): shared static state — hashing is not thread-safe across
    // concurrent fuzzyHashFile calls. Confirm single-threaded use.
    private static roll_state_class rollState = new roll_state_class();

    /*
      a rolling hash, based on the Adler checksum. By using a rolling hash
      we can perform auto resynchronisation after inserts/deletes

      internally, h1 is the sum of the bytes in the window and h2
      is the sum of the bytes times the index

      h3 is a shift/xor based rolling hash, and is mostly needed to ensure that
      we can cope with large blocksize values
    */
    static int rollHash(int c) {
        rollState.h2 -= rollState.h1;
        //roll_state.h2 = roll_state.h2 & 0x7fffffff
        rollState.h2 += ROLLING_WINDOW * c;
        //roll_state.h2 = roll_state.h2 & 0x7fffffff

        rollState.h1 += c;
        //roll_state.h1 = roll_state.h1 & 0x7fffffff
        rollState.h1 -= rollState.window[(rollState.n % ROLLING_WINDOW)];
        //roll_state.h1 = roll_state.h1 & 0x7fffffff

        rollState.window[rollState.n % ROLLING_WINDOW] = (char)c;
        rollState.n = (rollState.n+1)%ROLLING_WINDOW;

        /* The original spamsum AND'ed this value with 0xFFFFFFFF which
           in theory should have no effect. This AND has been removed
           for performance (jk) */
        rollState.h3 = (rollState.h3 << 5);
        rollState.h3 ^= c;
        //roll_state.h3 = roll_state.h3 & 0x7FFFFFFF
        //if( roll_state.h3 > 0xEFFFFFFF ) roll_state.h3 -= 0xEFFFFFFF
        long result = ((rollState.h1 + rollState.h2 + rollState.h3));//&0x7FFFFFFF
        return (int) result;
    }

    /*
      reset the state of the rolling hash and return the initial rolling hash value
    */
    static void rollReset() {
        rollState.h1 = 0;
        rollState.h2 = 0;
        rollState.h3 = 0;
        rollState.n = 0;
        Arrays.fill(rollState.window,(char)0);
    }

    /* a simple non-rolling hash, based on the FNV hash */
    static int sumHash(int c, int h) {
        h *= HASH_PRIME;
        //h = h & 0xFFFFFFFF
        h ^= c;
        //h = h & 0xFFFFFFFF
        return h;
    }

    // Working state for one hashing run: the result being built, the two
    // signature buffers (p at block_size, ret2 at block_size*2), the three
    // hash accumulators and the write indexes j and k.
    private class ss_context {
        FuzzyHash ret;
        char[] p;
        long total_chars;
        int h, h2, h3;
        int j, k;
        int block_size;
        char[] ret2 = new char[SPAMSUM_LENGTH/2 + 1];
    }

    // Initializes the context for the given file: picks the smallest
    // block_size (doubling from MIN_BLOCKSIZE) such that the whole file fits
    // into SPAMSUM_LENGTH signature characters. Returns true on bad input
    // (C-style error convention: true == error).
    static boolean ssInit(ss_context ctx, File handle) {
        if ( ctx == null ) {
            return true;
        }

        ctx.ret = new FuzzyHash();

        if (handle != null) {
            ctx.total_chars = handle.length();
        }

        ctx.block_size = MIN_BLOCKSIZE;
        while (ctx.block_size * SPAMSUM_LENGTH < ctx.total_chars) {
            ctx.block_size = ctx.block_size * 2;
        }
        return false;
    }

    // Base-64 alphabet used to emit one signature character per trigger point.
    static char[] b64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".toCharArray();

    // Feeds bufferSize bytes through the rolling and FNV hashes, emitting a
    // signature character whenever the rolling hash hits a reset point for
    // block_size (into ctx.p) or block_size*2 (into ctx.ret2).
    static void ssEngine(ss_context ctx, byte[] buffer, int bufferSize) {
        if (null == ctx || null == buffer) {
            return;
        }

        for ( int i = 0 ; i < bufferSize ; ++i) {
            /*
               at each character we update the rolling hash and
               the normal hash. When the rolling hash hits the
               reset value then we emit the normal hash as a
               element of the signature and reset both hashes
            */
            ctx.h = rollHash(buffer[i]);// & 0x7FFFFFFF;
            ctx.h2 = sumHash(buffer[i], ctx.h2);// & 0x7FFFFFFF;
            ctx.h3 = sumHash(buffer[i], ctx.h3);// & 0x7FFFFFFF;

            if (((0xFFFFFFFFl & ctx.h) % ctx.block_size) == (ctx.block_size-1)) {
                /* we have hit a reset point. We now emit a
                   hash which is based on all chacaters in the
                   piece of the message between the last reset
                   point and this one */
                ctx.p[ctx.j] = b64[(int)((ctx.h2&0xFFFF) % 64)];
                if (ctx.j < SPAMSUM_LENGTH-1) {
                    /* we can have a problem with the tail
                       overflowing. The easiest way to cope with
                       this is to only reset the second hash if
                       we have room for more characters in our
                       signature. This has the effect of combining
                       the last few pieces of the message into a
                       single piece */
                    ctx.h2 = HASH_INIT;
                    (ctx.j)++;
                }
            }

            /* this produces a second signature with a block size
               of block_size*2. By producing dual signatures in
               this way the effect of small changes in the message
               size near a block size boundary is greatly reduced. */
            if (((0xFFFFFFFFl & ctx.h) % (ctx.block_size*2)) == ((ctx.block_size*2)-1)) {
                // NOTE(review): operator precedence makes this (ctx.h3 & (0xFFFF % 64))
                // i.e. ctx.h3 & 63, not ((ctx.h3 & 0xFFFF) % 64) as in the first
                // signature above. Because 64 divides 0x10000 the two expressions
                // produce the same value for this mask, so the quirk is benign.
                ctx.ret2[ctx.k] = b64[(int) (ctx.h3&0xFFFF % 64)];
                if (ctx.k < SPAMSUM_LENGTH/2-1) {
                    ctx.h3 = HASH_INIT;
                    (ctx.k)++;
                }
            }
        }
    }

    // Streams the file through ssEngine and finalizes both signature strings
    // into ctx.ret. Returns true on bad input (C-style error convention).
    static boolean ssUpdate(ss_context ctx, File handle) throws IOException {
        int bytesRead = 0;
        byte[] buffer;

        if (null == ctx || null == handle) {
            return true;
        }

        buffer = new byte[BUFFER_SIZE];

        ctx.ret.blocksize = ctx.block_size;
        // ctx.p = ctx.ret + strlen(ctx.ret)
        ctx.p = new char[SPAMSUM_LENGTH];

        //memset(ctx.p, 0, SPAMSUM_LENGTH+1)
        Arrays.fill(ctx.p, (char)0 );
        //memset(ctx.ret2, 0, sizeof(ctx.ret2.length))
        Arrays.fill(ctx.ret2, (char)0 );

        ctx.k  = ctx.j  = 0;
        ctx.h3 = ctx.h2 = HASH_INIT;
        ctx.h  = 0;

        rollReset();

        // NOTE(review): the stream is not closed if read/ssEngine throws —
        // consider try/finally or try-with-resources.
        FileInputStream in = new FileInputStream(handle);
        // while ((bytes_read = fread(buffer,sizeof(byte),BUFFER_SIZE,handle)) > 0)
        while (in.available() > 0 ) {
            bytesRead = in.read(buffer);
            ssEngine(ctx,buffer,bytesRead);
        }
        in.close();

        if (ctx.h != 0) {
            // Flush the pending tail of both signatures.
            ctx.p[ctx.j] = b64[(int) ((ctx.h2 & 0xFFFF) % 64)];
            ctx.ret2[ctx.k] = b64[(int) ((ctx.h3 &0xFFFF) % 64)];
        }

        ctx.ret.hash = new String(ctx.p);
        ctx.ret.hash2 = new String(ctx.ret2);

        return false;
    }

    /**
     * Computes the fuzzy (context-triggered piecewise) hash of a file.
     *
     * @param handle the file to hash
     * @return the computed fuzzy hash (blocksize plus two signatures)
     * @throws IOException if the file cannot be read
     */
    public FuzzyHash fuzzyHashFile(File handle) throws IOException
{ ss_context ctx; boolean done = false; if (null == handle) { return null; } ctx = new ss_context(); ssInit(ctx, handle); ctx.ret.filename = handle.getPath(); while (!done) { ssUpdate(ctx,handle); // our blocksize guess may have been way off - repeat if necessary if (ctx.block_size > MIN_BLOCKSIZE && ctx.j < SPAMSUM_LENGTH/2) { ctx.block_size = ctx.block_size / 2; } else { done = true; } } return ctx.ret; } /** * * @param filename * @return * @throws IOException */ public FuzzyHash fuzzyHashFilename(String filename) throws IOException { if (null == filename) { return null; } File handle = new File(filename);//,"rb"); if (!handle.exists()) { return null; } return fuzzyHashFile(handle); } public FuzzyHash fuzzyHashBuf(byte[] buf, int bufLen) { ss_context ctx = new ss_context(); boolean done = false; if (buf == null) { return null; } ctx.total_chars = bufLen; ssInit(ctx, null); while (!done) { ctx.p = new char[SPAMSUM_LENGTH+1]; // TODO Duplication! ctx.k = ctx.j = 0; ctx.h3 = ctx.h2 = HASH_INIT; ctx.h = 0; rollReset(); ssEngine(ctx,buf,bufLen); /* our blocksize guess may have been way off - repeat if necessary */ if (ctx.block_size > MIN_BLOCKSIZE && ctx.j < SPAMSUM_LENGTH/2) { ctx.block_size = ctx.block_size / 2; } else { done = true; } if (ctx.h != 0) { ctx.p[ctx.j] = b64[(int) ((ctx.h2&0xFFFF) % 64)]; ctx.ret2[ctx.k] = b64[(int) ((ctx.h3&0xFFFF) % 64)]; } } ctx.ret = new FuzzyHash(ctx.block_size, String.valueOf(ctx.p), String.valueOf(ctx.ret2)); return ctx.ret; } /** * * @param buf * @return */ public FuzzyHash fuzzyHashBuf(byte[] buf) { return this.fuzzyHashBuf(buf, buf.length); } /* we only accept a match if we have at least one common substring in the signature of length ROLLING_WINDOW. This dramatically drops the false positive rate for low score thresholds while having negligable affect on the rate of spam detection. 
return 1 if the two strings do have a common substring, 0 otherwise */ static int hasCommonSubstring(char[] s1, char[] s2) { int i; long[] hashes = new long[SPAMSUM_LENGTH]; /* there are many possible algorithms for common substring detection. In this case I am re-using the rolling hash code to act as a filter for possible substring matches */ rollReset(); /* first compute the windowed rolling hash at each offset in the first string */ for (i=0;i < s1.length;i++) { hashes[i] = rollHash((char)s1[i]); } rollReset(); /* now for each offset in the second string compute the rolling hash and compare it to all of the rolling hashes for the first string. If one matches then we have a candidate substring match. We then confirm that match with a direct string comparison */ /*for (i=0;i < s2.length;i++) { long h = roll_hash((char)s2[i]); if (i < ROLLING_WINDOW-1) continue; for (j=ROLLING_WINDOW-1;j<num_hashes;j++) { if (hashes[j] != 0 && hashes[j] == h) { // we have a potential match - confirm it //FIXME if (strlen(s2+i-(ROLLING_WINDOW-1)) >= ROLLING_WINDOW && strncmp(s2+i-(ROLLING_WINDOW-1), s1+j-(ROLLING_WINDOW-1), ROLLING_WINDOW) == 0) { return 1; } } } }*/ return 0; } // eliminate sequences of longer than 3 identical characters. These // sequences contain very little information so they tend to just bias // the result unfairly static char[] eliminateSequences(String string) { char[] str = string.toCharArray(); StringBuffer ret = new StringBuffer(); // Do not include repeats: for (int i=3;i<str.length;i++) { if (str[i] != str[i-1] || str[i] != str[i-2] || str[i] != str[i-3]) { ret.append(str[i]); } } return ret.toString().toCharArray(); } /* this is the low level string scoring algorithm. It takes two strings and scores them on a scale of 0-100 where 0 is a terrible match and 100 is a great match. The block_size is used to cope with very small messages. 
*/ static int scoreStrings(char[] s1, char[] s2, int blockSize) { int score = 0; int len1, len2; len1 = s1.length; len2 = s2.length; if (len1 > SPAMSUM_LENGTH || len2 > SPAMSUM_LENGTH) { /* not a real spamsum signature? */ return 0; } /* the two strings must have a common substring of length ROLLING_WINDOW to be candidates */ /* if (has_common_substring(s1, s2) == 0) { return 0; } */ /* compute the edit distance between the two strings. The edit distance gives us a pretty good idea of how closely related the two strings are */ score = StringUtils.getLevenshteinDistance(new String(s1), new String(s2)); /* scale the edit distance by the lengths of the two strings. This changes the score to be a measure of the proportion of the message that has changed rather than an absolute quantity. It also copes with the variability of the string lengths. */ score = (score * SPAMSUM_LENGTH) / (len1 + len2); /* at this stage the score occurs roughly on a 0-64 scale, * with 0 being a good match and 64 being a complete * mismatch */ /* rescale to a 0-100 scale (friendlier to humans) */ score = (100 * score) / 64; /* it is possible to get a score above 100 here, but it is a really terrible match */ if (score >= 100) { return 0; } /* now re-scale on a 0-100 scale with 0 being a poor match and 100 being a excellent match. */ score = 100 - score; // printf ("len1: %"PRIu32" len2: %"PRIu32"\n", len1, len2); /* when the blocksize is small we don't want to exaggerate the match size */ if (score > blockSize/MIN_BLOCKSIZE * Math.min(len1, len2)) { score = blockSize/MIN_BLOCKSIZE * Math.min(len1, len2); } return score; } /* given two spamsum strings return a value indicating the degree to which they match. */ static int fuzzyCompare(FuzzyHash fh1, FuzzyHash fh2) { int score = 0; char[] s11, s12; char[] s21, s22; // if the blocksizes don't match then we are comparing // apples to oranges. This isn't an 'error' per se. We could // have two valid signatures, but they can't be compared. 
if (fh1.blocksize != fh2.blocksize && fh1.blocksize != fh2.blocksize*2 && fh2.blocksize != fh1.blocksize*2) { return 0; } // there is very little information content is sequences of // the same character like 'LLLLL'. Eliminate any sequences // longer than 3. This is especially important when combined // with the has_common_substring() test below. s11 = eliminateSequences(fh1.hash); s21 = eliminateSequences(fh2.hash); s12 = eliminateSequences(fh1.hash2); s22 = eliminateSequences(fh2.hash2); // each signature has a string for two block sizes. We now // choose how to combine the two block sizes. We checked above // that they have at least one block size in common if (fh1.blocksize == fh2.blocksize) { int score1, score2; score1 = scoreStrings(s11, s21, fh1.blocksize); score2 = scoreStrings(s12, s22, fh2.blocksize); score = Math.min(score1, score2); } else if (fh1.blocksize == fh2.blocksize*2) { score = scoreStrings(s11, s22, fh1.blocksize); } else { score = scoreStrings(s12, s21, fh2.blocksize); } return (int)score; } }
/**
   Copyright 2008 University of Rochester

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

package edu.ur.ir.web.action.institution;

import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import com.opensymphony.xwork2.ActionSupport;

import edu.ur.ir.item.GenericItem;
import edu.ur.ir.item.ItemSecurityService;
import edu.ur.ir.item.ItemService;
import edu.ur.ir.security.IrAcl;
import edu.ur.ir.security.IrClassTypePermission;
import edu.ur.ir.security.IrUserGroupAccessControlEntry;
import edu.ur.ir.security.SecurityService;
import edu.ur.ir.user.IrUserGroup;
import edu.ur.ir.user.UserGroupService;

/**
 * Manage group permissions on a item
 *
 * @author Sharmila Ranganathan
 */
public class EditGroupPermissionsOnItem extends ActionSupport {

    /** eclipse generated id */
    private static final long serialVersionUID = 398234971838172385L;

    /** id of the item */
    private Long itemId;

    /** Id of the Institutional item */
    private Long institutionalItemId;

    /** id of the group */
    private Long groupId;

    /** Service to get user groups */
    private UserGroupService userGroupService;

    /** security service */
    private SecurityService securityService;

    /** Item security service */
    private ItemSecurityService itemSecurityService;

    /** Access control entries */
    private Set<IrUserGroupAccessControlEntry> entries = new HashSet<IrUserGroupAccessControlEntry>();

    /** User groups that can be added to a institutional item */
    private List<IrUserGroup> userGroups = new LinkedList<IrUserGroup>();

    /** Institutional item */
    private GenericItem item;

    /** User group */
    private IrUserGroup userGroup;

    /** Item service */
    private ItemService itemService;

    /** set of ids for the permissions */
    private Long[] permissionIds = new Long[]{};

    /** Logger. */
    private static final Logger log = LogManager.getLogger(EditGroupPermissionsOnItem.class);

    /** permissions that can be given to a item */
    private List<IrClassTypePermission> permissions = new LinkedList<IrClassTypePermission>();

    /** access control list for the item */
    private IrAcl acl;

    /** Indicates whether to the Item is publicly viewable or not */
    private boolean isPublic = false;

    /**
     * Loads the groups and item.
     *
     * @return SUCCESS
     */
    public String addGroupsToItem() {
        item = itemService.getGenericItem(itemId, false);
        userGroups = userGroupService.getAllNameOrder();
        acl = itemSecurityService.getAcl(item);
        loadPermissions();
        return SUCCESS;
    }

    /**
     * Get the permissions for the specified group.
     *
     * @return SUCCESS
     */
    public String getGroupPermissions() {
        item = itemService.getGenericItem(itemId, false);
        userGroup = userGroupService.get(groupId, false);
        acl = itemSecurityService.getAcl(item);
        loadPermissions();
        return SUCCESS;
    }

    /**
     * add the permissions to the user group
     *
     * @return SUCCESS
     */
    public String addItemPermissionsToGroup() {
        log.debug("add permissions called");
        item = itemService.getGenericItem(itemId, false);
        acl = itemSecurityService.getAcl(item);
        if (acl == null) {
            // no ACL yet for this item - create one before adding entries
            acl = itemSecurityService.createAcl(item);
        }
        userGroup = userGroupService.get(groupId, false);

        IrUserGroupAccessControlEntry entry = acl.getGroupAccessControlEntry(userGroup);
        if (entry != null) {
            // group already has an entry - drop its current permissions first
            clearEntryPermissions(entry);
        } else {
            entry = acl.createGroupAccessControlEntry(userGroup);
        }

        addSelectedPermissionsToEntry(entry);

        securityService.save(acl);
        entries = acl.getGroupEntries();
        userGroups = userGroupService.getAllNameOrder();
        return SUCCESS;
    }

    /**
     * Updates the view status for item file
     *
     * @return SUCCESS
     */
    public String updateItemPublicView() {
        item = itemService.getGenericItem(itemId, false);
        item.setPubliclyViewable(isPublic);
        itemService.makePersistent(item);
        return SUCCESS;
    }

    /**
     * Update permissions on a group that already has permissions on a
     * item.
     *
     * @return SUCCESS
     */
    public String updatePermissions() {
        log.debug("update permissions called");
        item = itemService.getGenericItem(itemId, false);
        acl = itemSecurityService.getAcl(item);
        userGroup = userGroupService.get(groupId, false);

        IrUserGroupAccessControlEntry entry = acl.getGroupAccessControlEntry(userGroup);
        clearEntryPermissions(entry);
        addSelectedPermissionsToEntry(entry);

        securityService.save(acl);
        entries = acl.getGroupEntries();
        userGroups = userGroupService.getAllNameOrder();
        return SUCCESS;
    }

    /**
     * Remove all permissions for the user group on the institutional item.
     *
     * @return SUCCESS
     */
    public String removeGroupFromItem() {
        // FIX: the log message was copy-pasted from addItemPermissionsToGroup
        // ("add permissions called") and misreported the action.
        log.debug("remove group from item called");
        item = itemService.getGenericItem(itemId, false);
        userGroup = userGroupService.get(groupId, false);
        acl = itemSecurityService.removeGroupFromItemAcl(item, userGroup);
        entries = acl.getGroupEntries();
        return SUCCESS;
    }

    /**
     * Remove every permission currently held by the given access control
     * entry. Copies the set first to avoid mutating it while iterating.
     */
    private void clearEntryPermissions(IrUserGroupAccessControlEntry entry) {
        Set<IrClassTypePermission> currentPermissions = new HashSet<IrClassTypePermission>();
        currentPermissions.addAll(entry.getIrClassTypePermissions());
        for (IrClassTypePermission permission : currentPermissions) {
            entry.removePermission(permission);
        }
    }

    /**
     * Add every permission selected in the form (permissionIds) to the
     * given access control entry. Shared by add and update actions.
     */
    private void addSelectedPermissionsToEntry(IrUserGroupAccessControlEntry entry) {
        log.debug("permission ids size = " + permissionIds.length);
        for (Long id : permissionIds) {
            IrClassTypePermission permission =
                securityService.getIrClassTypePermissionById(id, false);
            log.debug("adding permission " + permission);
            entry.addPermission(permission);
        }
    }

    /**
     * Load the permissions allowed for an institutional item
     */
    private void loadPermissions() {
        permissions = itemSecurityService.getItemPermissions();
    }

    public Long getItemId() {
        return itemId;
    }

    public void setItemId(Long itemId) {
        this.itemId = itemId;
    }

    public Long getGroupId() {
        return groupId;
    }

    public void setGroupId(Long groupId) {
        this.groupId = groupId;
    }

    public UserGroupService getUserGroupService() {
        return userGroupService;
    }

    public void setUserGroupService(UserGroupService userGroupService) {
        this.userGroupService = userGroupService;
    }

    public void setSecurityService(SecurityService securityService) {
        this.securityService = securityService;
    }

    public Set<IrUserGroupAccessControlEntry> getEntries() {
        return entries;
    }

    public void setEntries(Set<IrUserGroupAccessControlEntry> entries) {
        this.entries = entries;
    }

    public List<IrUserGroup> getUserGroups() {
        return userGroups;
    }

    public void setUserGroups(List<IrUserGroup> userGroups) {
        this.userGroups = userGroups;
    }

    public GenericItem getItem() {
        return item;
    }

    public void setItem(GenericItem item) {
        this.item = item;
    }

    public IrUserGroup getUserGroup() {
        return userGroup;
    }

    public void setUserGroup(IrUserGroup userGroup) {
        this.userGroup = userGroup;
    }

    public ItemService getItemService() {
        return itemService;
    }

    public void setItemService(ItemService itemService) {
        this.itemService = itemService;
    }

    public Long[] getPermissionIds() {
        return permissionIds;
    }

    public void setPermissionIds(Long[] permissionIds) {
        this.permissionIds = permissionIds;
    }

    public List<IrClassTypePermission> getPermissions() {
        return permissions;
    }

    public void setPermissions(List<IrClassTypePermission> permissions) {
        this.permissions = permissions;
    }

    public IrAcl getAcl() {
        return acl;
    }

    public void setAcl(IrAcl acl) {
        this.acl = acl;
    }

    public Long getInstitutionalItemId() {
        return institutionalItemId;
    }

    public void setInstitutionalItemId(Long institutionalItemId) {
        this.institutionalItemId = institutionalItemId;
    }

    public boolean isPublic() {
        return isPublic;
    }

    public void setPublic(boolean isPublic) {
        this.isPublic = isPublic;
    }

    public void setItemSecurityService(ItemSecurityService itemSecurityService) {
        this.itemSecurityService = itemSecurityService;
    }
}
/*
 * #%L
 * ACS AEM Commons Bundle
 * %%
 * Copyright (C) 2013 Adobe
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.adobe.acs.commons.rewriter.impl;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.management.DynamicMBean;
import javax.management.NotCompliantMBeanException;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.OpenDataException;
import javax.management.openmbean.OpenType;
import javax.management.openmbean.SimpleType;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletResponse;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.turbo.TurboFilter;
import ch.qos.logback.core.spi.FilterReply;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingConstants;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.commons.osgi.PropertiesUtil;
import org.apache.sling.rewriter.ProcessingComponentConfiguration;
import org.apache.sling.rewriter.ProcessingContext;
import org.apache.sling.rewriter.Transformer;
import org.apache.sling.rewriter.TransformerFactory;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceRegistration;
import org.osgi.service.component.ComponentContext;
import org.osgi.service.event.Event;
import org.osgi.service.event.EventConstants;
import org.osgi.service.event.EventHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;

import com.adobe.acs.commons.rewriter.AbstractTransformer;
import com.adobe.acs.commons.util.impl.AbstractGuavaCacheMBean;
import com.adobe.acs.commons.util.impl.GenericCacheMBean;
import com.adobe.granite.ui.clientlibs.HtmlLibrary;
import com.adobe.granite.ui.clientlibs.HtmlLibraryManager;
import com.adobe.granite.ui.clientlibs.LibraryType;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

/**
 * ACS AEM Commons - Versioned Clientlibs (CSS/JS) Rewriter
 * Re-writes paths to CSS and JS clientlibs to include the md5 checksum as a "
 * selector; in the form: /path/to/clientlib.123456789.css or /path/to/clientlib.min.1234589.css (if minification is enabled)
 * If the Enforce MD5 filter is enabled, the paths will be like /path/to/clientlib.ACSHASH123456789.css or /path/to/clientlib.min.ACSHASH1234589.css (if minification is enabled)
 */
@Component(metatype = true, label = "ACS AEM Commons - Versioned Clientlibs Transformer Factory",
        description = "Sling Rewriter Transformer Factory to add auto-generated checksums to client library references")
@Properties({
        @Property(name = "pipeline.type", value = "versioned-clientlibs", propertyPrivate = true),
        @Property(name = EventConstants.EVENT_TOPIC, value = "com/adobe/granite/ui/librarymanager/INVALIDATED", propertyPrivate = true),
        @Property(name = "jmx.objectname", value = "com.adobe.acs.commons.rewriter:type=VersionedClientlibsTransformerMd5Cache", propertyPrivate = true)
})
@Service(value = {DynamicMBean.class, TransformerFactory.class, EventHandler.class})
public final class VersionedClientlibsTransformerFactory extends AbstractGuavaCacheMBean<VersionedClientLibraryMd5CacheKey, String>
        implements TransformerFactory, EventHandler, GenericCacheMBean {

    private static final Logger log = LoggerFactory.getLogger(VersionedClientlibsTransformerFactory.class);

    private static final int DEFAULT_MD5_CACHE_SIZE = 300;

    private static final boolean DEFAULT_DISABLE_VERSIONING = false;

    private static final boolean DEFAULT_ENFORCE_MD5 = false;

    @Property(label = "MD5 Cache Size", description = "Maximum size of the md5 cache.",
            intValue = DEFAULT_MD5_CACHE_SIZE)
    private static final String PROP_MD5_CACHE_SIZE = "md5cache.size";

    @Property(label = "Disable Versioning", description = "Should versioning of clientlibs be disabled",
            boolValue = DEFAULT_DISABLE_VERSIONING)
    private static final String PROP_DISABLE_VERSIONING = "disable.versioning";

    @Property(label = "Enforce MD5",
            description = "Enables a filter which returns a 404 error if the MD5 in the request does not match the expected value",
            boolValue = DEFAULT_ENFORCE_MD5)
    private static final String PROP_ENFORCE_MD5 = "enforce.md5";

    private static final String ATTR_JS_PATH = "src";
    private static final String ATTR_CSS_PATH = "href";

    private static final String MIN_SELECTOR = "min";
    private static final String MIN_SELECTOR_SEGMENT = "." + MIN_SELECTOR;
    private static final String MD5_PREFIX = "ACSHASH";

    // pattern used to parse paths in the filter - group 1 = path; group 2 = md5; group 3 = extension
    // FIX: the 'min' selector's dot was unescaped ("(?:min.)?"), so any
    // character after "min" matched (e.g. "minX"). Escaped to match a
    // literal "min." selector segment only.
    private static final Pattern FILTER_PATTERN =
            Pattern.compile("(.*?)\\.(?:min\\.)?" + MD5_PREFIX + "([a-zA-Z0-9]+)\\.(js|css)");

    private static final String PROXY_PREFIX = "/etc.clientlibs/";

    private Cache<VersionedClientLibraryMd5CacheKey, String> md5Cache;

    private boolean disableVersioning;

    private boolean enforceMd5;

    @Reference
    private HtmlLibraryManager htmlLibraryManager;

    private ServiceRegistration filterReg;

    public VersionedClientlibsTransformerFactory() throws NotCompliantMBeanException {
        super(GenericCacheMBean.class);
    }

    /**
     * Reads the OSGi configuration, builds the MD5 cache and, when MD5
     * enforcement is enabled, registers the request filter that rejects
     * requests carrying a stale checksum.
     */
    @Activate
    protected void activate(ComponentContext componentContext) {
        final BundleContext bundleContext = componentContext.getBundleContext();
        final Dictionary<?, ?> props = componentContext.getProperties();
        final int size = PropertiesUtil.toInteger(props.get(PROP_MD5_CACHE_SIZE), DEFAULT_MD5_CACHE_SIZE);
        this.md5Cache = CacheBuilder.newBuilder().recordStats().maximumSize(size).build();
        this.disableVersioning = PropertiesUtil.toBoolean(props.get(PROP_DISABLE_VERSIONING), DEFAULT_DISABLE_VERSIONING);
        this.enforceMd5 = PropertiesUtil.toBoolean(props.get(PROP_ENFORCE_MD5), DEFAULT_ENFORCE_MD5);

        if (enforceMd5) {
            Dictionary<String, Object> filterProps = new Hashtable<String, Object>();
            filterProps.put("sling.filter.scope", "REQUEST");
            filterProps.put("service.ranking", Integer.valueOf(0));

            filterReg = bundleContext.registerService(Filter.class.getName(),
                    new BadMd5VersionedClientLibsFilter(), filterProps);
        }
    }

    /** Drops the cache and unregisters the enforcement filter, if any. */
    @Deactivate
    protected void deactivate() {
        this.md5Cache = null;
        if (filterReg != null) {
            // FIX: removed a stray empty statement (";;") after this call.
            filterReg.unregister();
            filterReg = null;
        }
    }

    public Transformer createTransformer() {
        return new VersionableClientlibsTransformer();
    }

    /**
     * Dispatches to the correct attribute (href for CSS, src for JS) and
     * returns the attributes with the clientlib path rewritten; unrelated
     * elements pass through untouched.
     */
    private Attributes versionClientLibs(final String elementName, final Attributes attrs,
                                         final SlingHttpServletRequest request) {
        if (SAXElementUtils.isCSS(elementName, attrs)) {
            return this.rebuildAttributes(new AttributesImpl(attrs), attrs.getIndex("", ATTR_CSS_PATH),
                    attrs.getValue("", ATTR_CSS_PATH), LibraryType.CSS, request);
        } else if (SAXElementUtils.isJavaScript(elementName, attrs)) {
            return this.rebuildAttributes(new AttributesImpl(attrs), attrs.getIndex("", ATTR_JS_PATH),
                    attrs.getValue("", ATTR_JS_PATH), LibraryType.JS, request);
        } else {
            return attrs;
        }
    }

    /**
     * Replaces the path attribute at the given index with its versioned
     * form, re-prefixing the servlet context path when one is present.
     */
    private Attributes rebuildAttributes(final AttributesImpl newAttributes, final int index,
                                         final String path, final LibraryType libraryType,
                                         final SlingHttpServletRequest request) {
        final String contextPath = request.getContextPath();
        String libraryPath = path;
        if (StringUtils.isNotBlank(contextPath)) {
            // strip the context path so the library manager sees a repository path
            libraryPath = path.substring(contextPath.length());
        }

        String versionedPath = this.getVersionedPath(libraryPath, libraryType, request.getResourceResolver());

        if (StringUtils.isNotBlank(versionedPath)) {
            if (StringUtils.isNotBlank(contextPath)) {
                versionedPath = contextPath + versionedPath;
            }
            log.debug("Rewriting to: {}", versionedPath);
            newAttributes.setValue(index, versionedPath);
        } else {
            log.debug("Versioned Path could not be created properly");
        }

        return newAttributes;
    }

    /**
     * Builds the versioned path for a clientlib:
     * path[.min].[ACSHASH]md5 + extension. Returns null when the library
     * cannot be resolved, and the original path when anything unexpected
     * goes wrong while parsing it.
     */
    private String getVersionedPath(final String originalPath, final LibraryType libraryType,
                                    final ResourceResolver resourceResolver) {
        try {
            boolean appendMinSelector = false;
            String libraryPath = StringUtils.substringBeforeLast(originalPath, ".");
            if (libraryPath.endsWith(MIN_SELECTOR_SEGMENT)) {
                appendMinSelector = true;
                libraryPath = StringUtils.substringBeforeLast(libraryPath, ".");
            }

            final HtmlLibrary htmlLibrary = getLibrary(libraryType, libraryPath, resourceResolver);

            if (htmlLibrary != null) {
                StringBuilder builder = new StringBuilder();
                builder.append(libraryPath);
                builder.append(".");

                if (appendMinSelector) {
                    builder.append(MIN_SELECTOR).append(".");
                }
                if (enforceMd5) {
                    builder.append(MD5_PREFIX);
                }
                builder.append(getMd5(htmlLibrary));
                builder.append(libraryType.extension);

                return builder.toString();
            } else {
                log.debug("Could not find HtmlLibrary at path: {}", libraryPath);
                return null;
            }
        } catch (Exception ex) {
            // Handle unexpected formats of the original path
            log.error("Attempting to get a versioned path for [ {} ] but could not because of: {}",
                    originalPath, ex.getMessage());
            return originalPath;
        }
    }

    /**
     * Resolves an HtmlLibrary for the path, expanding the /etc.clientlibs/
     * proxy prefix against every resolver search path until one matches.
     */
    private HtmlLibrary getLibrary(LibraryType libraryType, String libraryPath, ResourceResolver resourceResolver) {
        HtmlLibrary htmlLibrary = null;

        if (libraryPath.startsWith(PROXY_PREFIX)) {
            final String relativePath = libraryPath.substring(PROXY_PREFIX.length());
            for (final String prefix : resourceResolver.getSearchPath()) {
                final String absolutePath = prefix + relativePath;
                htmlLibrary = htmlLibraryManager.getLibrary(libraryType, absolutePath);
                if (htmlLibrary != null) {
                    break;
                }
            }
        } else {
            htmlLibrary = htmlLibraryManager.getLibrary(libraryType, libraryPath);
        }
        return htmlLibrary;
    }

    /** Returns the library's MD5, computing and caching it on first use. */
    @Nonnull
    private String getMd5(@Nonnull final HtmlLibrary htmlLibrary) throws IOException, ExecutionException {
        return md5Cache.get(new VersionedClientLibraryMd5CacheKey(htmlLibrary), new Callable<String>() {
            @Override
            public String call() throws Exception {
                return calculateMd5(htmlLibrary);
            }
        });
    }

    @Nonnull
    private String calculateMd5(@Nonnull final HtmlLibrary htmlLibrary) throws IOException {
        // FIX: DigestUtils.md5Hex reads the stream but does not close it;
        // the stream was previously leaked on every cache miss.
        try (InputStream in = htmlLibrary.getInputStream()) {
            return DigestUtils.md5Hex(in);
        }
    }

    /** SAX transformer that rewrites clientlib references as they stream past. */
    private class VersionableClientlibsTransformer extends AbstractTransformer {
        private SlingHttpServletRequest request;

        @Override
        public void init(ProcessingContext context, ProcessingComponentConfiguration config) throws IOException {
            super.init(context, config);
            this.request = context.getRequest();
        }

        public void startElement(final String namespaceURI, final String localName, final String qName,
                                 final Attributes attrs) throws SAXException {
            final Attributes nextAttributes;
            if (disableVersioning) {
                nextAttributes = attrs;
            } else {
                nextAttributes = versionClientLibs(localName, attrs, request);
            }
            getContentHandler().startElement(namespaceURI, localName, qName, nextAttributes);
        }
    }

    /** Invalidates both cache entries (JS and CSS) for a changed clientlib. */
    @Override
    public void handleEvent(Event event) {
        String path = (String) event.getProperty(SlingConstants.PROPERTY_PATH);
        md5Cache.invalidate(new VersionedClientLibraryMd5CacheKey(path, LibraryType.JS));
        md5Cache.invalidate(new VersionedClientLibraryMd5CacheKey(path, LibraryType.CSS));
    }

    @Override
    protected Cache<VersionedClientLibraryMd5CacheKey, String> getCache() {
        return md5Cache;
    }

    @Override
    protected long getBytesLength(String cacheObj) {
        return cacheObj.getBytes(Charset.forName("UTF-8")).length;
    }

    @Override
    protected void addCacheData(Map<String, Object> data, String cacheObj) {
        data.put("Value", cacheObj);
    }

    @Override
    protected String toString(String cacheObj) throws Exception {
        return cacheObj;
    }

    @Override
    protected CompositeType getCacheEntryType() throws OpenDataException {
        return new CompositeType("Cache Entry", "Cache Entry",
                new String[] { "Cache Key", "Value" },
                new String[] { "Cache Key", "Value" },
                new OpenType[] { SimpleType.STRING, SimpleType.STRING });
    }

    /**
     * Parses a request URI of the form path[.min].ACSHASHmd5.ext into its
     * parts and resolves the referenced library. Returns an empty UriInfo
     * (null libraryType/htmlLibrary) when the URI does not match.
     */
    @Nonnull
    UriInfo getUriInfo(@Nullable final String uri, @Nonnull ResourceResolver resourceResolver) {
        if (uri != null) {
            Matcher matcher = FILTER_PATTERN.matcher(uri);
            if (matcher.matches()) {
                final String libraryPath = matcher.group(1);
                final String md5 = matcher.group(2);
                final String extension = matcher.group(3);

                LibraryType libraryType;
                if (LibraryType.CSS.extension.substring(1).equals(extension)) {
                    libraryType = LibraryType.CSS;
                } else {
                    libraryType = LibraryType.JS;
                }

                final HtmlLibrary htmlLibrary = getLibrary(libraryType, libraryPath, resourceResolver);
                return new UriInfo(libraryPath + "." + extension, md5, libraryType, htmlLibrary);
            }
        }
        return new UriInfo("", "", null, null);
    }

    /**
     * Request filter that sends a 404 when the MD5 selector in the request
     * does not match the clientlib's actual checksum.
     */
    class BadMd5VersionedClientLibsFilter implements Filter {

        @Override
        public void doFilter(final ServletRequest request,
                             final ServletResponse response,
                             final FilterChain filterChain) throws IOException, ServletException {
            if (request instanceof SlingHttpServletRequest && response instanceof SlingHttpServletResponse) {
                final SlingHttpServletRequest slingRequest = (SlingHttpServletRequest) request;
                final SlingHttpServletResponse slingResponse = (SlingHttpServletResponse) response;
                String uri = slingRequest.getRequestURI();
                UriInfo uriInfo = getUriInfo(uri, slingRequest.getResourceResolver());
                if (uriInfo.cacheKey != null) {
                    if ("".equals(uriInfo.md5)) {
                        log.debug("MD5 is blank for '{}' in Versioned ClientLibs cache, allowing {} to pass",
                                uriInfo.cleanedUri, uri);
                        filterChain.doFilter(request, response);
                        return;
                    }

                    String md5FromCache = null;
                    try {
                        md5FromCache = getCacheEntry(uriInfo.cacheKey);
                    } catch (Exception e) {
                        // deliberate best-effort: a cache failure falls through
                        // to the null handling below rather than failing the request
                        md5FromCache = null;
                    }

                    // this static value "Invalid cache key parameter." happens when the cache key can't be
                    // found in the cache
                    if ("Invalid cache key parameter.".equals(md5FromCache)) {
                        md5FromCache = calculateMd5(uriInfo.htmlLibrary);
                    }

                    if (md5FromCache == null) {
                        // something went bad during the cache access
                        log.warn("Failed to fetch data from Versioned ClientLibs cache, allowing {} to pass", uri);
                        filterChain.doFilter(request, response);
                    } else {
                        // the file is in the cache, compare the md5 from cache with the one in the request
                        if (md5FromCache.equals(uriInfo.md5)) {
                            log.debug("MD5 equals for '{}' in Versioned ClientLibs cache, allowing {} to pass",
                                    uriInfo.cleanedUri, uri);
                            filterChain.doFilter(request, response);
                        } else {
                            log.info("MD5 differs for '{}' in Versioned ClientLibs cache. Expected {}. Sending 404 for '{}'",
                                    new Object[]{uriInfo.cleanedUri, md5FromCache, uri});
                            slingResponse.sendError(HttpServletResponse.SC_NOT_FOUND);
                        }
                    }
                } else {
                    filterChain.doFilter(request, response);
                }
            } else {
                filterChain.doFilter(request, response);
            }
        }

        @Override
        public void init(final FilterConfig filterConfig) throws ServletException {}

        @Override
        public void destroy() {}
    }

    /** Immutable holder for the parts parsed out of a versioned-clientlib URI. */
    static class UriInfo {
        private final String cleanedUri;
        private final String md5;
        private final LibraryType libraryType;
        private final HtmlLibrary htmlLibrary;
        private final String cacheKey;

        UriInfo(String cleanedUri, String md5, LibraryType libraryType, HtmlLibrary htmlLibrary) {
            this.cleanedUri = cleanedUri;
            this.md5 = md5;
            this.libraryType = libraryType;
            this.htmlLibrary = htmlLibrary;
            if (libraryType != null && htmlLibrary != null) {
                cacheKey = htmlLibrary.getLibraryPath() + libraryType.extension;
            } else {
                // null cacheKey marks "not a versioned clientlib URI" for the filter
                cacheKey = null;
            }
        }
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.dmn.core.assembler;

import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.namespace.QName;

import org.drools.compiler.builder.impl.KnowledgeBuilderImpl;
import org.drools.compiler.compiler.PackageRegistry;
import org.drools.compiler.lang.descr.PackageDescr;
import org.drools.core.definitions.InternalKnowledgePackage;
import org.kie.api.internal.assembler.KieAssemblerService;
import org.kie.api.internal.io.ResourceTypePackage;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceConfiguration;
import org.kie.api.io.ResourceType;
import org.kie.api.io.ResourceWithConfiguration;
import org.kie.dmn.api.core.DMNCompiler;
import org.kie.dmn.api.core.DMNCompilerConfiguration;
import org.kie.dmn.api.core.DMNModel;
import org.kie.dmn.api.marshalling.v1_1.DMNMarshaller;
import org.kie.dmn.core.api.DMNFactory;
import org.kie.dmn.core.compiler.DMNCompilerConfigurationImpl;
import org.kie.dmn.core.compiler.DMNCompilerImpl;
import org.kie.dmn.core.compiler.DMNProfile;
import org.kie.dmn.core.compiler.ImportDMNResolverUtil;
import org.kie.dmn.core.compiler.ImportDMNResolverUtil.ImportType;
import org.kie.dmn.core.compiler.profiles.ExtendedDMNProfile;
import org.kie.dmn.core.impl.DMNKnowledgeBuilderError;
import org.kie.dmn.core.impl.DMNPackageImpl;
import org.kie.dmn.feel.util.Either;
import org.kie.dmn.model.v1_1.Definitions;
import org.kie.dmn.model.v1_1.Import;
import org.kie.internal.builder.ResultSeverity;
import org.kie.internal.utils.ChainedProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Kie assembler for {@link ResourceType#DMN} resources: unmarshals each DMN
 * resource, resolves cross-model DMN imports, sorts models by dependency
 * order and compiles them into {@link DMNPackageImpl}s registered on the
 * knowledge builder's package registry.
 *
 * <p>The compiler and the active DMN profiles are cached on the
 * {@link KnowledgeBuilderImpl} under {@link #DMN_COMPILER_CACHE_KEY} /
 * {@link #DMN_PROFILES_CACHE_KEY} so all resources of a build share them.
 */
public class DMNAssemblerService implements KieAssemblerService {

    private static final Logger logger = LoggerFactory.getLogger( DMNAssemblerService.class );

    /** Prefix of all DMN-related configuration properties. */
    public static final String ORG_KIE_DMN_PREFIX = "org.kie.dmn";
    /** Property prefix under which additional DMN profile class names are declared. */
    public static final String DMN_PROFILE_PREFIX = ORG_KIE_DMN_PREFIX + ".profiles.";
    public static final String DMN_COMPILER_CACHE_KEY = "DMN_COMPILER_CACHE_KEY";
    public static final String DMN_PROFILES_CACHE_KEY = "DMN_PROFILES_CACHE_KEY";

    @Override
    public ResourceType getResourceType() {
        return ResourceType.DMN;
    }

    /**
     * Adds all DMN resources of a build, compiling them in dependency order so
     * that imported models are available when an importing model is compiled.
     *
     * @param kbuilder  expected to be a {@link KnowledgeBuilderImpl}
     * @param resources the DMN resources with their configurations
     * @param type      the (DMN) resource type
     * @throws Exception if unmarshalling, import resolution or compilation fails
     */
    @Override
    public void addResources(Object kbuilder, Collection<ResourceWithConfiguration> resources, ResourceType type) throws Exception {
        KnowledgeBuilderImpl kbuilderImpl = (KnowledgeBuilderImpl) kbuilder;
        DMNCompilerImpl dmnCompiler = (DMNCompilerImpl) kbuilderImpl.getCachedOrCreate(DMN_COMPILER_CACHE_KEY, () -> getCompiler(kbuilderImpl));
        DMNMarshaller dmnMarshaller = dmnCompiler.getMarshaller();

        if (resources.size() == 1) {
            // quick path: a single model cannot depend on a sibling in this batch.
            internalAddResource(kbuilderImpl, dmnCompiler, resources.iterator().next(), Collections.emptyList());
            return;
        }

        // Unmarshal every resource once, keyed by its model QName (namespace + name).
        List<DMNResource> dmnResources = new ArrayList<>();
        for (ResourceWithConfiguration r : resources) {
            // try-with-resources: the reader was previously never closed, leaking
            // one stream per resource in the multi-resource path.
            Definitions definitions;
            try (Reader reader = r.getResource().getReader()) {
                definitions = dmnMarshaller.unmarshal(reader);
            }
            QName modelID = new QName(definitions.getNamespace(), definitions.getName());
            DMNResource dmnResource = new DMNResource(modelID, r, definitions);
            dmnResources.add(dmnResource);
        }

        // enrich with imports: record, for each model, which sibling models it depends on.
        for (DMNResource r : dmnResources) {
            for (Import i : r.getDefinitions().getImport()) {
                if (ImportDMNResolverUtil.whichImportType(i) == ImportType.DMN) {
                    Either<String, DMNResource> resolvedResult = ImportDMNResolverUtil.resolveImportDMN(i, dmnResources, DMNResource::getModelID);
                    DMNResource located = resolvedResult.getOrElseThrow(RuntimeException::new);
                    r.addDependency(located.getModelID());
                }
            }
        }

        // Topologically sort so dependencies compile before their dependents.
        List<DMNResource> sortedDmnResources = DMNResourceDependenciesSorter.sort(dmnResources);

        Collection<DMNModel> dmnModels = new ArrayList<>();
        for (DMNResource dmnRes : sortedDmnResources) {
            DMNModel dmnModel = internalAddResource(kbuilderImpl, dmnCompiler, dmnRes.getResAndConfig(), dmnModels);
            dmnModels.add(dmnModel);
        }
    }

    /**
     * Compiles one resource, invoking its before/after-add callbacks around the compilation.
     *
     * @return the compiled model, or {@code null} if compilation failed
     */
    private DMNModel internalAddResource(KnowledgeBuilderImpl kbuilder, DMNCompiler dmnCompiler, ResourceWithConfiguration r, Collection<DMNModel> dmnModels) throws Exception {
        r.getBeforeAdd().accept(kbuilder);
        DMNModel dmnModel = compileResourceToModel(kbuilder, dmnCompiler, r.getResource(), dmnModels);
        r.getAfterAdd().accept(kbuilder);
        return dmnModel;
    }

    /**
     * Legacy single-resource entry point; offers no control over compilation
     * order, so previously-registered DMN models from all packages are passed
     * as the available import candidates.
     */
    @Override
    public void addResource(Object kbuilder, Resource resource, ResourceType type, ResourceConfiguration configuration) throws Exception {
        logger.warn("invoked legacy addResource (no control on the order of the assembler compilation): " + resource.getSourcePath());
        KnowledgeBuilderImpl kbuilderImpl = (KnowledgeBuilderImpl) kbuilder;
        DMNCompiler dmnCompiler = kbuilderImpl.getCachedOrCreate( DMN_COMPILER_CACHE_KEY, () -> getCompiler( kbuilderImpl ) );

        // Collect every DMN model already compiled into any package so imports can resolve.
        Collection<DMNModel> dmnModels = new ArrayList<>();
        for (PackageRegistry pr : kbuilderImpl.getPackageRegistry().values()) {
            ResourceTypePackage resourceTypePackage = pr.getPackage().getResourceTypePackages().get(ResourceType.DMN);
            if (resourceTypePackage != null) {
                DMNPackageImpl dmnpkg = (DMNPackageImpl) resourceTypePackage;
                dmnModels.addAll(dmnpkg.getAllModels().values());
            }
        }
        compileResourceToModel(kbuilderImpl, dmnCompiler, resource, dmnModels);
    }

    /**
     * Compiles a resource to a {@link DMNModel} and registers it in the DMN
     * package of its namespace, reporting builder errors for compilation
     * failures and duplicate model names.
     *
     * @return the compiled model, or {@code null} on failure
     */
    private DMNModel compileResourceToModel(KnowledgeBuilderImpl kbuilderImpl, DMNCompiler dmnCompiler, Resource resource, Collection<DMNModel> dmnModels) {
        DMNModel model = dmnCompiler.compile(resource, dmnModels);
        if( model != null ) {
            String namespace = model.getNamespace();

            PackageRegistry pkgReg = kbuilderImpl.getOrCreatePackageRegistry( new PackageDescr( namespace ) );
            InternalKnowledgePackage kpkgs = pkgReg.getPackage();
            // Ensure cloned packages keep referring to the same compiler instance.
            kpkgs.addCloningResource( DMN_COMPILER_CACHE_KEY, dmnCompiler );

            Map<ResourceType, ResourceTypePackage> rpkg = kpkgs.getResourceTypePackages();

            DMNPackageImpl dmnpkg = (DMNPackageImpl) rpkg.get( ResourceType.DMN );
            if ( dmnpkg == null ) {
                dmnpkg = new DMNPackageImpl( namespace );
                rpkg.put(ResourceType.DMN, dmnpkg);
            } else {
                if ( dmnpkg.getModel( model.getName() ) != null ) {
                    kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.ERROR, resource, namespace, "Duplicate model name " + model.getName() + " in namespace " + namespace));
                    logger.error( "Duplicate model name {} in namespace {}", model.getName(), namespace );
                }
            }
            dmnpkg.addModel( model.getName(), model );
            dmnpkg.addProfiles(kbuilderImpl.getCachedOrCreate(DMN_PROFILES_CACHE_KEY, () -> getDMNProfiles(kbuilderImpl)));
        } else {
            kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.ERROR, resource, "Unable to compile DMN model for the resource"));
            logger.error( "Unable to compile DMN model for resource {}", resource.getSourcePath() );
        }
        return model;
    }

    /**
     * Assembles the list of active DMN profiles: the defaults (depending on
     * strict-mode), plus any profile classes declared via
     * {@code org.kie.dmn.profiles.*} properties, loaded reflectively from the
     * builder's root class loader. Loading failures are reported as WARNING
     * builder results; the defaults (and any profile loaded before the
     * failure) are still returned.
     */
    private List<DMNProfile> getDMNProfiles(KnowledgeBuilderImpl kbuilderImpl) {
        ChainedProperties chainedProperties = kbuilderImpl.getBuilderConfiguration().getChainedProperties();

        List<DMNProfile> dmnProfiles = new ArrayList<>();
        dmnProfiles.addAll(getDefaultDMNProfiles(chainedProperties));

        Map<String, String> dmnProfileProperties = new HashMap<>();
        chainedProperties.mapStartsWith(dmnProfileProperties, DMN_PROFILE_PREFIX, false);
        if (!dmnProfileProperties.isEmpty()) {
            try {
                for (Map.Entry<String, String> dmnProfileProperty : dmnProfileProperties.entrySet()) {
                    DMNProfile dmnProfile = (DMNProfile) kbuilderImpl.getRootClassLoader()
                                                                     .loadClass(dmnProfileProperty.getValue()).newInstance();
                    dmnProfiles.add(dmnProfile);
                }
                return dmnProfiles;
            } catch (Exception e) {
                kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.WARNING, "Trying to load a non-existing Kie DMN profile " + e.getLocalizedMessage()));
                logger.error("Trying to load a non-existing Kie DMN profile {}", e.getLocalizedMessage(), e);
                kbuilderImpl.addBuilderResult(new DMNKnowledgeBuilderError(ResultSeverity.WARNING, "DMN Compiler configuration contained errors, will fall-back using empty-configuration compiler."));
                logger.warn("DMN Compiler configuration contained errors, will fall-back using empty-configuration compiler.");
            }
        }
        return dmnProfiles;
    }

    /**
     * Default profiles: the Extended (Kie) profile unless strict DMN
     * conformance is requested, in which case no default profile applies.
     */
    public static List<DMNProfile> getDefaultDMNProfiles(ChainedProperties properties) {
        if (!isStrictMode(properties)) {
            return Arrays.asList(new ExtendedDMNProfile());
        } else {
            return Collections.emptyList();
        }
    }

    /**
     * Strict mode is on when {@code org.kie.dmn.strictConformance} is set to
     * {@code true} or is present with an empty value.
     */
    public static boolean isStrictMode(ChainedProperties properties) {
        String val = properties.getProperty("org.kie.dmn.strictConformance", "false");
        return "".equals(val) || Boolean.parseBoolean(val);
    }

    /** Creates a compiler configured from kmodule preferences and the cached profiles. */
    private DMNCompiler getCompiler(KnowledgeBuilderImpl kbuilderImpl) {
        List<DMNProfile> dmnProfiles = kbuilderImpl.getCachedOrCreate(DMN_PROFILES_CACHE_KEY, () -> getDMNProfiles(kbuilderImpl));

        DMNCompilerConfiguration compilerConfig = compilerConfigWithKModulePrefs(kbuilderImpl.getRootClassLoader(),
                                                                                 kbuilderImpl.getBuilderConfiguration().getChainedProperties(),
                                                                                 dmnProfiles);
        return DMNFactory.newCompiler(compilerConfig);
    }

    /**
     * Returns a DMNCompilerConfiguration with the specified properties set, and applying the explicited dmnProfiles.
     * @param classLoader root class loader used by the compiler configuration
     * @param chainedProperties applies properties --it does not do any classloading nor profile loading based on these properites, just passes the values.
     * @param dmnProfiles applies these DMNProfile(s) to the DMNCompilerConfiguration
     * @return the configured compiler configuration
     */
    public static DMNCompilerConfiguration compilerConfigWithKModulePrefs(ClassLoader classLoader, ChainedProperties chainedProperties, List<DMNProfile> dmnProfiles) {
        DMNCompilerConfigurationImpl config = (DMNCompilerConfigurationImpl) DMNFactory.newCompilerConfiguration();
        config.setRootClassLoader(classLoader);

        Map<String, String> dmnPrefs = new HashMap<>();
        chainedProperties.mapStartsWith(dmnPrefs, ORG_KIE_DMN_PREFIX, true);
        config.setProperties(dmnPrefs);

        for (DMNProfile dmnProfile : dmnProfiles) {
            config.addExtensions(dmnProfile.getExtensionRegisters());
            config.addDRGElementCompilers(dmnProfile.getDRGElementCompilers());
            config.addFEELProfile(dmnProfile);
        }
        return config;
    }
}
// Copyright (C) 2011 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.git;

import static org.eclipse.jgit.lib.ObjectIdSerialization.readNotNull;
import static org.eclipse.jgit.lib.ObjectIdSerialization.writeNotNull;

import com.google.gerrit.reviewdb.client.PatchSet;
import com.google.gerrit.reviewdb.client.Project;

import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdOwnerMap;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefDatabase;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevSort;
import org.eclipse.jgit.revwalk.RevWalk;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Cache of which tags are reachable from which branches of one project.
 *
 * <p>Each non-tag ref is assigned a small integer flag; every {@link Tag}
 * carries a {@link BitSet} whose bit {@code i} means "reachable from the ref
 * with flag {@code i}". {@link #build} computes the sets by walking history;
 * {@link #prepare} / {@link #refresh} try cheaper incremental updates when
 * refs merely fast-forward or new refs start at known tips.
 */
class TagSet {
  private static final Logger log = LoggerFactory.getLogger(TagSet.class);

  private final Project.NameKey projectName;
  // Ref name -> (flag index, last known ObjectId). CachedRef is an
  // AtomicReference so the stored tip can be CAS-updated in place.
  private final Map<String, CachedRef> refs;
  // Tags keyed by the (peeled) ObjectId they point at.
  private final ObjectIdOwnerMap<Tag> tags;

  TagSet(Project.NameKey projectName) {
    this.projectName = projectName;
    this.refs = new HashMap<>();
    this.tags = new ObjectIdOwnerMap<>();
  }

  /** Returns the cached tag pointing at {@code id}, or {@code null} if none. */
  Tag lookupTag(AnyObjectId id) {
    return tags.get(id);
  }

  /**
   * Records a fast-forward of {@code refName} from {@code oldValue} to
   * {@code newValue}, if the cached tip still matches {@code oldValue}.
   *
   * @return true if the cached ref was updated
   */
  boolean updateFastForward(String refName, ObjectId oldValue, ObjectId newValue) {
    CachedRef ref = refs.get(refName);
    if (ref != null) {
      // compareAndSet works on reference equality, but this operation
      // wants to use object equality. Switch out oldValue with cur so the
      // compareAndSet will function correctly for this operation.
      ObjectId cur = ref.get();
      if (cur.equals(oldValue)) {
        return ref.compareAndSet(cur, newValue);
      }
    }
    return false;
  }

  /**
   * Checks each requested ref against the cached state, setting bits in
   * {@code m.mask} for refs whose cached flags are still valid, collecting
   * brand-new refs in {@code m.newRefs} and tags that may have become
   * unreachable in {@code m.lostRefs}.
   */
  void prepare(TagMatcher m) {
    @SuppressWarnings("resource")
    RevWalk rw = null; // lazily created; closed in the finally block below.
    try {
      for (Ref currentRef : m.include) {
        if (currentRef.isSymbolic()) {
          continue;
        }
        if (currentRef.getObjectId() == null) {
          continue;
        }

        CachedRef savedRef = refs.get(currentRef.getName());
        if (savedRef == null) {
          // If the reference isn't known to the set, return null
          // and force the caller to rebuild the set in a new copy.
          m.newRefs.add(currentRef);
          continue;
        }

        // The reference has not been moved. It can be used as-is.
        ObjectId savedObjectId = savedRef.get();
        if (currentRef.getObjectId().equals(savedObjectId)) {
          m.mask.set(savedRef.flag);
          continue;
        }

        // Check on-the-fly to see if the branch still reaches the tag.
        // This is very likely for a branch that fast-forwarded.
        try {
          if (rw == null) {
            rw = new RevWalk(m.db);
            rw.setRetainBody(false);
          }

          RevCommit savedCommit = rw.parseCommit(savedObjectId);
          RevCommit currentCommit = rw.parseCommit(currentRef.getObjectId());
          if (rw.isMergedInto(savedCommit, currentCommit)) {
            // Fast-forward. Safely update the reference in-place.
            savedRef.compareAndSet(savedObjectId, currentRef.getObjectId());
            m.mask.set(savedRef.flag);
            continue;
          }

          // The branch rewound. Walk the list of commits removed from
          // the reference. If any matches to a tag, this has to be removed.
          boolean err = false;
          rw.reset();
          rw.markStart(savedCommit);
          rw.markUninteresting(currentCommit);
          rw.sort(RevSort.TOPO, true);
          RevCommit c;
          while ((c = rw.next()) != null) {
            Tag tag = tags.get(c);
            if (tag != null && tag.refFlags.get(savedRef.flag)) {
              m.lostRefs.add(new TagMatcher.LostRef(tag, savedRef.flag));
              err = true;
            }
          }
          if (!err) {
            // All of the tags are still reachable. Update in-place.
            savedRef.compareAndSet(savedObjectId, currentRef.getObjectId());
            m.mask.set(savedRef.flag);
          }

        } catch (IOException err) {
          // Defer a cache update until later. No conclusion can be made
          // based on an exception reading from the repository storage.
          log.warn("Error checking tags of " + projectName, err);
        }
      }
    } finally {
      if (rw != null) {
        rw.close();
      }
    }
  }

  /**
   * (Re)builds the full tag reachability sets for {@code git}, trying an
   * incremental {@link #refresh} from {@code old} first when possible.
   */
  void build(Repository git, TagSet old, TagMatcher m) {
    if (old != null && m != null && refresh(old, m)) {
      return;
    }

    try (TagWalk rw = new TagWalk(git)) {
      rw.setRetainBody(false);
      for (Ref ref : git.getRefDatabase().getRefs(RefDatabase.ALL).values()) {
        if (skip(ref)) {
          continue;
        } else if (isTag(ref)) {
          // For a tag, remember where it points to.
          addTag(rw, git.peel(ref));
        } else {
          // New reference to include in the set.
          addRef(rw, ref);
        }
      }

      // Traverse the complete history. Copy any flags from a commit to
      // all of its ancestors. This automatically updates any Tag object
      // as the TagCommit and the stored Tag object share the same
      // underlying bit set.
      TagCommit c;
      while ((c = (TagCommit) rw.next()) != null) {
        BitSet mine = c.refFlags;
        int pCnt = c.getParentCount();
        for (int pIdx = 0; pIdx < pCnt; pIdx++) {
          ((TagCommit) c.getParent(pIdx)).refFlags.or(mine);
        }
      }
    } catch (IOException e) {
      log.warn("Error building tags for repository " + projectName, e);
    }
  }

  /**
   * Deserializes the set: ref entries (name, flag, id), then tag entries
   * (id, flag bitset). Mirrors the format written by {@link #writeObject}.
   */
  void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    int refCnt = in.readInt();
    for (int i = 0; i < refCnt; i++) {
      String name = in.readUTF();
      int flag = in.readInt();
      ObjectId id = readNotNull(in);
      refs.put(name, new CachedRef(flag, id));
    }

    int tagCnt = in.readInt();
    for (int i = 0; i < tagCnt; i++) {
      ObjectId id = readNotNull(in);
      BitSet flags = (BitSet) in.readObject();
      tags.add(new Tag(id, flags));
    }
  }

  /** Serializes the set in the format consumed by {@link #readObject}. */
  void writeObject(ObjectOutputStream out) throws IOException {
    out.writeInt(refs.size());
    for (Map.Entry<String, CachedRef> e : refs.entrySet()) {
      out.writeUTF(e.getKey());
      out.writeInt(e.getValue().flag);
      writeNotNull(out, e.getValue().get());
    }

    out.writeInt(tags.size());
    for (Tag tag : tags) {
      writeNotNull(out, tag);
      out.writeObject(tag.refFlags);
    }
  }

  /**
   * Attempts an incremental rebuild from {@code old}. Succeeds when there are
   * no new refs, or when every new ref starts exactly at the tip of some
   * existing ref (the new ref inherits that ref's tag bits).
   *
   * @return true if this set was populated from {@code old}; false if a full
   *     rebuild is required
   */
  private boolean refresh(TagSet old, TagMatcher m) {
    if (m.newRefs.isEmpty()) {
      // No new references is a simple update. Copy from the old set.
      copy(old, m);
      return true;
    }

    // Only permit a refresh if all new references start from the tip of
    // an existing references. This happens some of the time within a
    // Gerrit Code Review server, perhaps about 50% of new references.
    // Since a complete rebuild is so costly, try this approach first.

    Map<ObjectId, Integer> byObj = new HashMap<>();
    for (CachedRef r : old.refs.values()) {
      ObjectId id = r.get();
      if (!byObj.containsKey(id)) {
        byObj.put(id, r.flag);
      }
    }

    for (Ref newRef : m.newRefs) {
      ObjectId id = newRef.getObjectId();
      if (id == null || refs.containsKey(newRef.getName())) {
        continue;
      } else if (!byObj.containsKey(id)) {
        // New ref does not start at a known tip; fall back to full rebuild.
        return false;
      }
    }

    copy(old, m);

    for (Ref newRef : m.newRefs) {
      ObjectId id = newRef.getObjectId();
      if (id == null || refs.containsKey(newRef.getName())) {
        continue;
      }

      // Assign the next free flag and copy the source ref's bit onto every
      // tag that was reachable from the tip the new ref starts at.
      int srcFlag = byObj.get(id);
      int newFlag = refs.size();
      refs.put(newRef.getName(), new CachedRef(newRef, newFlag));

      for (Tag tag : tags) {
        if (tag.refFlags.get(srcFlag)) {
          tag.refFlags.set(newFlag);
        }
      }
    }

    return true;
  }

  /**
   * Copies refs and tags from {@code old} (deep-copying each tag's bitset),
   * then clears the flag bits recorded as lost in {@code m.lostRefs}.
   */
  private void copy(TagSet old, TagMatcher m) {
    refs.putAll(old.refs);

    for (Tag srcTag : old.tags) {
      BitSet mine = new BitSet();
      mine.or(srcTag.refFlags);
      tags.add(new Tag(srcTag, mine));
    }

    for (TagMatcher.LostRef lost : m.lostRefs) {
      Tag mine = tags.get(lost.tag);
      if (mine != null) {
        mine.refFlags.clear(lost.flag);
      }
    }
  }

  /**
   * Registers a tag ref, keyed by its peeled target (or the tag object itself
   * if unpeeled). The tag shares the bitset of the commit it points at, so
   * flags set during the walk in {@link #build} become visible on the tag.
   */
  private void addTag(TagWalk rw, Ref ref) {
    ObjectId id = ref.getPeeledObjectId();
    if (id == null) {
      id = ref.getObjectId();
    }

    if (!tags.contains(id)) {
      BitSet flags;
      try {
        // Share the commit's bitset; build() will populate it.
        flags = ((TagCommit) rw.parseCommit(id)).refFlags;
      } catch (IncorrectObjectTypeException notCommit) {
        // Tag points at a tree/blob; it can never be reachable from a branch.
        flags = new BitSet();
      } catch (IOException e) {
        log.warn("Error on " + ref.getName() + " of " + projectName, e);
        flags = new BitSet();
      }
      tags.add(new Tag(id, flags));
    }
  }

  /**
   * Registers a non-tag ref: assigns it the next free flag, sets that bit on
   * its tip commit and marks the tip as a walk start point.
   */
  private void addRef(TagWalk rw, Ref ref) {
    try {
      TagCommit commit = (TagCommit) rw.parseCommit(ref.getObjectId());
      rw.markStart(commit);

      int flag = refs.size();
      commit.refFlags.set(flag);
      refs.put(ref.getName(), new CachedRef(ref, flag));
    } catch (IncorrectObjectTypeException notCommit) {
      // No need to spam the logs.
      // Quite many refs will point to non-commits.
      // For instance, refs from refs/cache-automerge
      // will often end up here.
    } catch (IOException e) {
      log.warn("Error on " + ref.getName() + " of " + projectName, e);
    }
  }

  /** True for refs that never participate in tag reachability (symbolic, unborn, change refs). */
  static boolean skip(Ref ref) {
    return ref.isSymbolic() || ref.getObjectId() == null
        || PatchSet.isChangeRef(ref.getName());
  }

  private static boolean isTag(Ref ref) {
    return ref.getName().startsWith(Constants.R_TAGS);
  }

  /** A tag target plus the set of ref flags it is reachable from. */
  static final class Tag extends ObjectIdOwnerMap.Entry {
    // Bit i set => reachable from the ref assigned flag i. May be shared
    // with a TagCommit's bitset during build().
    private final BitSet refFlags;

    Tag(AnyObjectId id, BitSet flags) {
      super(id);
      this.refFlags = flags;
    }

    /** True if this tag is reachable from any ref whose flag bit is in {@code mask}. */
    boolean has(BitSet mask) {
      return refFlags.intersects(mask);
    }
  }

  /** A ref's flag index plus its last known tip, CAS-updatable in place. */
  private static final class CachedRef extends AtomicReference<ObjectId> {
    private static final long serialVersionUID = 1L;

    final int flag;

    CachedRef(Ref ref, int flag) {
      this(flag, ref.getObjectId());
    }

    CachedRef(int flag, ObjectId id) {
      this.flag = flag;
      set(id);
    }
  }

  /** RevWalk producing TagCommit instances so each commit carries a flag bitset. */
  private static final class TagWalk extends RevWalk {
    TagWalk(Repository git) {
      super(git);
    }

    @Override
    protected TagCommit createCommit(AnyObjectId id) {
      return new TagCommit(id);
    }
  }

  /** Commit carrying the set of ref flags it is reachable from. */
  private static final class TagCommit extends RevCommit {
    final BitSet refFlags;

    TagCommit(AnyObjectId id) {
      super(id);
      refFlags = new BitSet();
    }
  }
}
/* * Copyright 2016 Sam Sun <me@samczsun.com> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.javadeobfuscator.deobfuscator.transformers.normalizer; import java.util.*; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.objectweb.asm.Type; import org.objectweb.asm.tree.ClassNode; import org.objectweb.asm.tree.FieldNode; import org.objectweb.asm.tree.MethodNode; import com.javadeobfuscator.deobfuscator.config.TransformerConfig; import com.javadeobfuscator.deobfuscator.executor.Context; import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaClass; import com.javadeobfuscator.deobfuscator.executor.defined.types.JavaMethod; import com.javadeobfuscator.deobfuscator.executor.providers.DelegatingProvider; import com.javadeobfuscator.deobfuscator.utils.ClassTree; @TransformerConfig.ConfigOptions(configClass = DuplicateRenamer.Config.class) public class DuplicateRenamer extends AbstractNormalizer<DuplicateRenamer.Config> { @Override public void remap(CustomRemapper remapper) { // Aggressive class name obfuscation Map<String, AtomicInteger> names = new HashMap<>(); classNodes().forEach(classNode -> { String classNodeName = classNode.name; if(!names.containsKey(classNodeName.toLowerCase(Locale.ROOT))) names.put(classNodeName.toLowerCase(Locale.ROOT), new AtomicInteger()); else { String newName = classNodeName; do { newName = newName + "_" + names.get(classNodeName.toLowerCase(Locale.ROOT)) 
.getAndIncrement(); } while (!remapper.map(classNode.name, newName)); } }); List<MethodArgs> methodNames = new ArrayList<>(); // Aggressive method name obfuscation (same method name + same params) AtomicInteger methodIdNow = new AtomicInteger(); classNodes().forEach(classNode -> { Set<String> allClasses = new HashSet<>(); ClassTree tree = this.getDeobfuscator().getClassTree(classNode.name); Set<String> tried = new HashSet<>(); LinkedList<String> toTry = new LinkedList<>(); toTry.add(tree.thisClass); while (!toTry.isEmpty()) { String t = toTry.poll(); if (tried.add(t) && !t.equals("java/lang/Object")) { ClassNode cn = this.getDeobfuscator().assureLoaded(t); ClassTree ct = this.getDeobfuscator().getClassTree(t); allClasses.add(t); allClasses.addAll(ct.parentClasses); toTry.addAll(ct.parentClasses); allClasses.addAll(ct.subClasses); toTry.addAll(ct.subClasses); } } allClasses.remove(tree.thisClass); for (MethodNode methodNode : new ArrayList<>(classNode.methods)) { if (methodNode.name.startsWith("<")) continue; if (methodNode.name.equals("main")) continue; final Map<Map.Entry<ClassNode, MethodNode>, Boolean> allMethodNodes = new HashMap<>(); final Type methodType = Type.getReturnType(methodNode.desc); final AtomicBoolean isLibrary = new AtomicBoolean(false); if (methodType.getSort() != Type.OBJECT && methodType.getSort() != Type.ARRAY) { if (methodType.getSort() == Type.METHOD) { throw new IllegalArgumentException("Did not expect method"); } allClasses.stream().map(name -> this.getDeobfuscator().assureLoaded(name)).forEach(node -> { boolean foundSimilar = false; boolean equals = false; MethodNode equalsMethod = null; for (MethodNode method : node.methods) { Type thisType = Type.getMethodType(methodNode.desc); Type otherType = Type.getMethodType(method.desc); if (methodNode.name.equals(method.name) && Arrays.equals(thisType.getArgumentTypes(), otherType.getArgumentTypes())) { foundSimilar = true; if (thisType.getReturnType().getSort() == 
otherType.getReturnType().getSort()) { equals = true; equalsMethod = method; } } } if (foundSimilar) { if (equals) { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, equalsMethod), true); } } else { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, methodNode), false); } }); } else if (methodType.getSort() == Type.ARRAY) { Type elementType = methodType.getElementType(); if (elementType.getSort() == Type.OBJECT) { String parent = elementType.getInternalName(); allClasses.stream().map(name -> this.getDeobfuscator().assureLoaded(name)).forEach(node -> { boolean foundSimilar = false; boolean equals = false; MethodNode equalsMethod = null; for (MethodNode method : node.methods) { Type thisType = Type.getMethodType(methodNode.desc); Type otherType = Type.getMethodType(method.desc); if (methodNode.name.equals(method.name) && Arrays.equals(thisType.getArgumentTypes(), otherType.getArgumentTypes())) { if (otherType.getReturnType().getSort() == Type.OBJECT) { foundSimilar = true; String child = otherType.getReturnType().getInternalName(); this.getDeobfuscator().assureLoaded(parent); this.getDeobfuscator().assureLoaded(child); if (this.getDeobfuscator().isSubclass(parent, child) || this.getDeobfuscator().isSubclass(child, parent)) { equals = true; equalsMethod = method; } } } } if (foundSimilar) { if (equals) { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, equalsMethod), true); } } else { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, methodNode), false); } }); } else { allClasses.stream().map(name -> this.getDeobfuscator().assureLoaded(name)).forEach(node -> { boolean foundSimilar = false; boolean equals = false; MethodNode equalsMethod = null; for (MethodNode method : node.methods) { Type thisType = Type.getMethodType(methodNode.desc); Type otherType = Type.getMethodType(method.desc); if (methodNode.name.equals(method.name) && Arrays.equals(thisType.getArgumentTypes(), otherType.getArgumentTypes())) { foundSimilar = true; if 
(thisType.getReturnType().getSort() == otherType.getReturnType().getSort()) { equals = true; equalsMethod = method; } } } if (foundSimilar) { if (equals) { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, equalsMethod), true); } } else { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, methodNode), false); } }); } } else if (methodType.getSort() == Type.OBJECT) { String parent = methodType.getInternalName(); allClasses.stream().map(name -> this.getDeobfuscator().assureLoaded(name)).forEach(node -> { boolean foundSimilar = false; boolean equals = false; MethodNode equalsMethod = null; for (MethodNode method : node.methods) { Type thisType = Type.getMethodType(methodNode.desc); Type otherType = Type.getMethodType(method.desc); if (methodNode.name.equals(method.name) && Arrays.equals(thisType.getArgumentTypes(), otherType.getArgumentTypes())) { if (otherType.getReturnType().getSort() == Type.OBJECT) { foundSimilar = true; String child = otherType.getReturnType().getInternalName(); this.getDeobfuscator().assureLoaded(parent); this.getDeobfuscator().assureLoaded(child); if (this.getDeobfuscator().isSubclass(parent, child) || this.getDeobfuscator().isSubclass(child, parent)) { equals = true; equalsMethod = method; } } } } if (foundSimilar) { if (equals) { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, equalsMethod), true); } else { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, methodNode), false); } } else { allMethodNodes.put(new AbstractMap.SimpleEntry<>(node, methodNode), false); } }); } allMethodNodes.forEach((key, value) -> { if (getDeobfuscator().isLibrary(key.getKey()) && value) { isLibrary.set(true); } }); if (!isLibrary.get()) { if(!methodNames.contains( new MethodArgs(classNode.name, methodNode.name, Type.getArgumentTypes(methodNode.desc)))) methodNames.add(new MethodArgs(classNode.name, methodNode.name, Type.getArgumentTypes(methodNode.desc))); else if(!remapper.methodMappingExists(classNode.name, methodNode.name, methodNode.desc)) 
{ while(true) { String name = methodNode.name + "_" + methodIdNow.getAndIncrement(); if(remapper.mapMethodName(classNode.name, methodNode.name, methodNode.desc, name, false)) { allMethodNodes.keySet().forEach(ent -> { remapper.mapMethodName( ent.getKey().name, ent.getValue().name, ent.getValue().desc, name, true); }); break; } } } //Check for superclass/interface method clashes Context context = new Context(new DelegatingProvider()); context.dictionary = classpath; JavaClass clazz = new JavaClass(classNode.name, context); List<JavaMethod> conflicters = new ArrayList<>(); while(clazz != null && !getDeobfuscator().isLibrary(clazz.getClassNode())) { for(JavaMethod method : clazz.getDeclaredMethods()) if(method.getName().equals(methodNode.name)) { Type[] types = Type.getArgumentTypes(method.getDesc()); Type[] types2 = Type.getArgumentTypes(methodNode.desc); boolean typesEqual = true; boolean returnTypesEqual = false; if(types.length == types2.length) { for(int i = 0; i < types.length; i++) if(types[i].getSort() == Type.OBJECT && types2[i].getSort() == Type.OBJECT && !types[i].getInternalName().equals(types2[i].getInternalName())) typesEqual = false; else if(!types2[i].equals(types[i])) typesEqual = false; }else typesEqual = false; String returnType = method.getDesc().substring(method.getDesc().indexOf(')') + 1, method.getDesc().length()); String returnType2 = methodNode.desc.substring(methodNode.desc.indexOf(')') + 1, methodNode.desc.length()); if(returnType.equals(returnType2)) returnTypesEqual = true; if(typesEqual && !returnTypesEqual && !remapper.methodMappingExists(classNode.name, methodNode.name, methodNode.desc)) //Adds the conflicter to a mapping conflicters.add(method); } clazz = clazz.getSuperclass(); } conflicters.addAll(getInterfaceConflicters(new JavaMethod(new JavaClass(classNode.name, context), methodNode), new JavaClass(classNode.name, context), remapper)); if(conflicters.size() > 0) { int id = methodIdNow.getAndIncrement(); while(true) { String name = 
methodNode.name + "_" + id; if(remapper.mapMethodName(classNode.name, methodNode.name, methodNode.desc, name, false)) { allMethodNodes.keySet().forEach(ent -> { remapper.mapMethodName( ent.getKey().name, ent.getValue().name, ent.getValue().desc, name, true); }); break; } } } } } }); // Aggressive field name obfuscation (same field name) classNodes().forEach(classNode -> { ClassTree tree = this.getDeobfuscator().getClassTree(classNode.name); Set<String> allClasses = new HashSet<>(); Set<String> tried = new HashSet<>(); LinkedList<String> toTry = new LinkedList<>(); toTry.add(tree.thisClass); while (!toTry.isEmpty()) { String t = toTry.poll(); if (tried.add(t) && !t.equals("java/lang/Object")) { ClassTree ct = this.getDeobfuscator().getClassTree(t); allClasses.add(t); allClasses.addAll(ct.parentClasses); allClasses.addAll(ct.subClasses); toTry.addAll(ct.parentClasses); toTry.addAll(ct.subClasses); } } for (FieldNode fieldNode : classNode.fields) { List<String> references = new ArrayList<>(); for (String possibleClass : allClasses) { ClassNode otherNode = this.getDeobfuscator().assureLoaded(possibleClass); boolean found = false; for (FieldNode otherField : otherNode.fields) { if (otherField.name.equals(fieldNode.name) && otherField.desc.equals(fieldNode.desc)) { found = true; } } if (!found) { references.add(possibleClass); } } if(!names .containsKey(fieldNode.name)) names.put(fieldNode.name, new AtomicInteger()); else if(!remapper.fieldMappingExists(classNode.name, fieldNode.name, fieldNode.desc)) { while(true) { String newName = fieldNode.name + "_" + names .get( fieldNode.name) .getAndIncrement(); if(remapper.mapFieldName(classNode.name, fieldNode.name, fieldNode.desc, newName, false)) { for(String s : references) { remapper.mapFieldName(s, fieldNode.name, fieldNode.desc, newName, true); } break; } } } } }); } private Collection<JavaMethod> getInterfaceConflicters(JavaMethod method, JavaClass clazz, CustomRemapper remapper) { Collection<JavaMethod> toReturn = new 
ArrayList<>(); if(getDeobfuscator().isLibrary(clazz.getClassNode())) return toReturn; for(JavaClass interf : clazz.getInterfaces()) if(!getDeobfuscator().isLibrary(interf.getClassNode())) { for(JavaMethod m : interf.getDeclaredMethods()) if(m.getName().equals(method.getName())) { Type[] types = Type.getArgumentTypes(m.getDesc()); Type[] types2 = Type.getArgumentTypes(method.getDesc()); boolean typesEqual = true; boolean returnTypesEqual = false; if(types.length == types2.length) { for(int i = 0; i < types.length; i++) if(types[i].getSort() == Type.OBJECT && types2[i].getSort() == Type.OBJECT && !types[i].getInternalName().equals(types2[i].getInternalName())) typesEqual = false; else if(!types2[i].equals(types[i])) typesEqual = false; }else typesEqual = false; String returnType = m.getDesc().substring(m.getDesc().indexOf(')') + 1, m.getDesc().length()); String returnType2 = method.getDesc().substring(method.getDesc().indexOf(')') + 1, method.getDesc().length()); if(returnType.equals(returnType2)) returnTypesEqual = true; if(typesEqual && !returnTypesEqual && !remapper.methodMappingExists(method.getOwner(), method.getName(), method.getDesc())) //Adds the conflicter to a mapping toReturn.add(m); } if(interf.getInterfaces() != null && interf.getInterfaces().length > 0) toReturn.addAll(getInterfaceConflicters(method, interf, remapper)); } return toReturn; } private class MethodArgs { public final String clazz; public final String methodName; public final Type[] args; public MethodArgs(String clazz, String methodName, Type[] args) { this.clazz = clazz; this.methodName = methodName; this.args = args; } @Override public boolean equals(Object o) { if(!(o instanceof MethodArgs)) return false; MethodArgs other = (MethodArgs)o; if(!clazz.equals(other.clazz)) return false; if(!methodName.equals(other.methodName)) return false; if(other.args.length != args.length) return false; for(int i = 0; i < other.args.length; i++) { Type t = other.args[i]; if(t.getSort() == Type.OBJECT && 
args[i].getSort() == Type.OBJECT && !t.getInternalName().equals(args[i].getInternalName())) return false; else if(!t.equals(args[i])) return false; } return true; } @Override public int hashCode() { int code = methodName.hashCode(); code += clazz.hashCode(); for(Type t : args) code += t.hashCode(); return code; } } public static class Config extends AbstractNormalizer.Config { public Config() { super(DuplicateRenamer.class); } } }
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.cache.impl; import com.hazelcast.cache.HazelcastCacheManager; import com.hazelcast.cache.HazelcastCachingProvider; import com.hazelcast.cache.impl.merge.policy.CacheMergePolicyProvider; import com.hazelcast.cache.impl.operation.CacheGetConfigOperation; import com.hazelcast.cache.impl.operation.CacheManagementConfigOperation; import com.hazelcast.config.CacheConfig; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.Member; import com.hazelcast.instance.HazelcastInstanceCacheManager; import com.hazelcast.instance.HazelcastInstanceImpl; import com.hazelcast.instance.HazelcastInstanceProxy; import com.hazelcast.spi.InternalCompletableFuture; import com.hazelcast.spi.NodeEngine; import com.hazelcast.spi.OperationService; import java.net.URI; import java.util.ArrayList; import java.util.Collection; import java.util.Properties; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static com.hazelcast.internal.config.ConfigValidator.checkCacheConfig; import static com.hazelcast.internal.config.MergePolicyValidator.checkMergePolicySupportsInMemoryFormat; import static com.hazelcast.util.FutureUtil.waitWithDeadline; import static com.hazelcast.util.Preconditions.checkNotNull; /** * Hazelcast {@link javax.cache.CacheManager} for server implementation. 
This * subclass of {@link AbstractHazelcastCacheManager} is managed by * {@link HazelcastServerCachingProvider}. * <p> * As it lives on a node JVM, it has reference to {@link CacheService} and * {@link NodeEngine} where this manager makes calls. * <p> * When JCache server implementation is configured, an instance of this class * will be returned when {@link javax.cache.spi.CachingProvider#getCacheManager()} * is called. */ public class HazelcastServerCacheManager extends AbstractHazelcastCacheManager { private final HazelcastInstanceImpl instance; private final NodeEngine nodeEngine; private final CacheService cacheService; public HazelcastServerCacheManager(HazelcastServerCachingProvider cachingProvider, HazelcastInstance hazelcastInstance, URI uri, ClassLoader classLoader, Properties properties) { super(cachingProvider, hazelcastInstance, uri, classLoader, properties); /* * TODO: * * A new interface, such as `InternalHazelcastInstance` (with a * `getOriginalInstance()` method), might be introduced. Then the * underlying actual (original) Hazelcast instance can be retrieved * through this. * * The original Hazelcast instance is used for getting access to * internals. It's also used for passing the full cache name directly * by this cache manager itself. 
*/ if (hazelcastInstance instanceof HazelcastInstanceProxy) { instance = ((HazelcastInstanceProxy) hazelcastInstance).getOriginal(); } else { instance = (HazelcastInstanceImpl) hazelcastInstance; } nodeEngine = instance.node.getNodeEngine(); cacheService = nodeEngine.getService(ICacheService.SERVICE_NAME); } @Override public void enableManagement(String cacheName, boolean enabled) { ensureOpen(); checkNotNull(cacheName, "cacheName cannot be null"); String cacheNameWithPrefix = getCacheNameWithPrefix(cacheName); cacheService.setManagementEnabled(null, cacheNameWithPrefix, enabled); enableStatisticManagementOnOtherNodes(cacheName, false, enabled); } @Override public void enableStatistics(String cacheName, boolean enabled) { ensureOpen(); checkNotNull(cacheName, "cacheName cannot be null"); String cacheNameWithPrefix = getCacheNameWithPrefix(cacheName); cacheService.setStatisticsEnabled(null, cacheNameWithPrefix, enabled); enableStatisticManagementOnOtherNodes(cacheName, true, enabled); } private void enableStatisticManagementOnOtherNodes(String cacheName, boolean statOrMan, boolean enabled) { String cacheNameWithPrefix = getCacheNameWithPrefix(cacheName); OperationService operationService = nodeEngine.getOperationService(); Collection<Future> futures = new ArrayList<Future>(); for (Member member : nodeEngine.getClusterService().getMembers()) { if (!member.localMember()) { CacheManagementConfigOperation op = new CacheManagementConfigOperation(cacheNameWithPrefix, statOrMan, enabled); Future future = operationService.invokeOnTarget(CacheService.SERVICE_NAME, op, member.getAddress()); futures.add(future); } } waitWithDeadline(futures, CacheProxyUtil.AWAIT_COMPLETION_TIMEOUT_SECONDS, TimeUnit.SECONDS); } @Override protected <K, V> void addCacheConfigIfAbsent(CacheConfig<K, V> cacheConfig) { cacheService.putCacheConfigIfAbsent(cacheConfig); } @Override @SuppressWarnings("unchecked") protected <K, V> CacheConfig<K, V> findCacheConfig(String cacheName, String 
simpleCacheName) { CacheConfig<K, V> config = cacheService.getCacheConfig(cacheName); if (config == null) { config = cacheService.findCacheConfig(simpleCacheName); if (config != null) { config.setManagerPrefix(cacheName.substring(0, cacheName.lastIndexOf(simpleCacheName))); } else { // if cache config is still not found, try to get it from a partition config = getCacheConfig(cacheName, simpleCacheName); } } if (config != null) { /* * Also create the cache config on other nodes to be sure that the * cache config is exist on all nodes. This is needed because even * though the cache config exists on this node (for example added * by an in-flight cache config creation operation), it still might * not exist on other nodes yet (but will be created eventually). */ createCacheConfig(cacheName, config); } return config; } @Override protected <K, V> void createCacheConfig(String cacheName, CacheConfig<K, V> config) { cacheService.createCacheConfigOnAllMembers(PreJoinCacheConfig.of(config)); } @Override protected <K, V> ICacheInternal<K, V> createCacheProxy(CacheConfig<K, V> cacheConfig) { HazelcastInstanceCacheManager cacheManager = instance.getCacheManager(); CacheProxy<K, V> cacheProxy = (CacheProxy<K, V>) cacheManager.getCacheByFullName(cacheConfig.getNameWithPrefix()); cacheProxy.setCacheManager(this); return cacheProxy; } @Override protected <K, V> CacheConfig<K, V> getCacheConfig(String cacheNameWithPrefix, String cacheName) { CacheGetConfigOperation op = new CacheGetConfigOperation(cacheNameWithPrefix, cacheName); int partitionId = nodeEngine.getPartitionService().getPartitionId(cacheNameWithPrefix); InternalCompletableFuture<CacheConfig<K, V>> f = nodeEngine.getOperationService() .invokeOnPartition(CacheService.SERVICE_NAME, op, partitionId); return f.join(); } @Override protected void removeCacheConfigFromLocal(String cacheNameWithPrefix) { cacheService.deleteCacheConfig(cacheNameWithPrefix); super.removeCacheConfigFromLocal(cacheNameWithPrefix); } @Override 
protected <K, V> void validateCacheConfig(CacheConfig<K, V> cacheConfig) { CacheMergePolicyProvider mergePolicyProvider = cacheService.getMergePolicyProvider(); checkCacheConfig(cacheConfig, mergePolicyProvider); Object mergePolicy = mergePolicyProvider.getMergePolicy(cacheConfig.getMergePolicy()); checkMergePolicySupportsInMemoryFormat(cacheConfig.getName(), mergePolicy, cacheConfig.getInMemoryFormat(), true, nodeEngine.getLogger(HazelcastCacheManager.class)); } @Override @SuppressWarnings("unchecked") public <T> T unwrap(Class<T> clazz) { if (HazelcastServerCacheManager.class.isAssignableFrom(clazz)) { return (T) this; } throw new IllegalArgumentException(); } protected void postClose() { if (properties.getProperty(HazelcastCachingProvider.HAZELCAST_CONFIG_LOCATION) != null) { hazelcastInstance.shutdown(); } } @Override protected void onShuttingDown() { close(); } public ICacheService getCacheService() { return cacheService; } }
package io.nem.apps.model;

import java.io.Serializable;
import java.util.Objects;

import org.nem.core.model.Account;
import org.nem.core.model.TransferTransactionAttachment;
import org.nem.core.time.TimeInstant;

/**
 * Value holder describing a single NEM transfer transaction: sender, recipient,
 * optional multisig account, amount, attachment, payload and timestamp.
 * <p>
 * Equality (and the matching hash code) is based on {@code recipient},
 * {@code sender}, {@code timeInstant} and {@code attachment} only.
 */
public class TransactionBlock implements Serializable {

	/** The Constant serialVersionUID. */
	private static final long serialVersionUID = 1L;

	/** The sender. */
	private Account sender;

	/** The recipient. */
	private Account recipient;

	/** The multisig account (field name kept as-is for API compatibility). */
	private Account mulitSig;

	/** The amount; defaults to zero. */
	private Long amount = 0L;

	/** The attachment. */
	private TransferTransactionAttachment attachment;

	/** The payload. */
	private String payload;

	/** The transaction message type. */
	private TransactionMessageType transactionMessageType;

	/** The time instant. */
	private TimeInstant timeInstant;

	/**
	 * Gets the multisig account.
	 *
	 * @return the multisig account
	 */
	public Account getMulitSig() {
		return mulitSig;
	}

	/**
	 * Sets the multisig account.
	 *
	 * @param mulitSig the new multisig account
	 */
	public void setMulitSig(Account mulitSig) {
		this.mulitSig = mulitSig;
	}

	/**
	 * Gets the sender.
	 *
	 * @return the sender
	 */
	public Account getSender() {
		return sender;
	}

	/**
	 * Sets the sender.
	 *
	 * @param sender the new sender
	 */
	public void setSender(Account sender) {
		this.sender = sender;
	}

	/**
	 * Gets the recipient.
	 *
	 * @return the recipient
	 */
	public Account getRecipient() {
		return recipient;
	}

	/**
	 * Sets the recipient.
	 *
	 * @param recipient the new recipient
	 */
	public void setRecipient(Account recipient) {
		this.recipient = recipient;
	}

	/**
	 * Gets the amount.
	 *
	 * @return the amount
	 */
	public Long getAmount() {
		return amount;
	}

	/**
	 * Sets the amount.
	 *
	 * @param amount the new amount
	 */
	public void setAmount(Long amount) {
		this.amount = amount;
	}

	/**
	 * Gets the attachment.
	 *
	 * @return the attachment
	 */
	public TransferTransactionAttachment getAttachment() {
		return attachment;
	}

	/**
	 * Sets the attachment.
	 *
	 * @param attachment the new attachment
	 */
	public void setAttachment(TransferTransactionAttachment attachment) {
		this.attachment = attachment;
	}

	/**
	 * Gets the time instant.
	 *
	 * @return the time instant
	 */
	public TimeInstant getTimeInstant() {
		return timeInstant;
	}

	/**
	 * Sets the time instant.
	 *
	 * @param timeInstant the new time instant
	 */
	public void setTimeInstant(TimeInstant timeInstant) {
		this.timeInstant = timeInstant;
	}

	/**
	 * Gets the payload.
	 *
	 * @return the payload
	 */
	public String getPayload() {
		return payload;
	}

	/**
	 * Sets the payload.
	 *
	 * @param payload the new payload
	 */
	public void setPayload(String payload) {
		this.payload = payload;
	}

	/**
	 * Gets the transaction message type.
	 *
	 * @return the transaction message type
	 */
	public TransactionMessageType getTransactionMessageType() {
		return transactionMessageType;
	}

	/**
	 * Sets the transaction message type.
	 *
	 * @param transactionMessageType the new transaction message type
	 */
	public void setTransactionMessageType(TransactionMessageType transactionMessageType) {
		this.transactionMessageType = transactionMessageType;
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#equals(java.lang.Object)
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null || getClass() != obj.getClass())
			return false;
		TransactionBlock other = (TransactionBlock) obj;
		// Same fields as before: recipient, sender, timeInstant, attachment.
		return Objects.equals(recipient, other.recipient)
				&& Objects.equals(sender, other.sender)
				&& Objects.equals(timeInstant, other.timeInstant)
				&& Objects.equals(attachment, other.attachment);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#hashCode()
	 */
	@Override
	public int hashCode() {
		// FIX: equals() was overridden without hashCode(), violating the
		// Object contract; hash over exactly the fields equals() compares.
		return Objects.hash(recipient, sender, timeInstant, attachment);
	}

	/*
	 * (non-Javadoc)
	 *
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		// FIX: previously called attachment.toString() directly, which threw
		// NullPointerException when attachment was null; string concatenation
		// renders null as "null" and is otherwise identical.
		return "TransactionBlock [sender=" + sender + ", recipient=" + recipient + ", amount=" + amount
				+ ", attachment=" + attachment + ", timeInstant=" + timeInstant + "]";
	}
}
//////////////////////////////////////////////////////////////////////////////
//
// Copyright 2019, Cornutum Project
// www.cornutum.org
//
//////////////////////////////////////////////////////////////////////////////

package org.cornutum.tcases.openapi.resolver;

import org.cornutum.tcases.openapi.Characters;
import org.cornutum.tcases.openapi.FormattedString;
import org.cornutum.tcases.util.MapBuilder;
import org.junit.Test;

import static org.cornutum.hamcrest.ExpectedFailure.expectFailure;
import static org.hamcrest.MatcherAssert.*;
import static org.hamcrest.Matchers.*;

import java.math.BigDecimal;
import java.util.Calendar;
import java.util.Date;
import java.util.Map;
import static java.util.Collections.emptyMap;

/**
 * Runs tests for {@link ObjectDomain}.
 */
public class ObjectDomainTest extends ValueDomainTest {

  /**
   * Domain with no declared properties: exactly one additional property whose
   * value is a decimal in [0.0, 1.0].
   */
  @Test
  public void whenAdditionalPropertiesOnly() {
    // Given...
    ObjectDomain domain = new ObjectDomain();

    // When...
    domain.setAdditionalPropertyCount( new IntegerConstant( 1));
    domain.setAdditionalPropertyValues( new DecimalDomain( 0.0, 1.0));

    // Then...
    verifyContainsValues( domain, 1000);

    // One additional property with an in-range decimal value is accepted.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "a", new DecimalValue( new BigDecimal( "0.0")))
          .build()),
      is( true));

    // An empty object does not satisfy the additional-property count of 1.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .build()),
      is( false));

    // NOTE(review): "A" is rejected — presumably outside the default
    // additional-property-name domain; confirm against ObjectDomain defaults.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "A", new DecimalValue( new BigDecimal( "0.0")))
          .build()),
      is( false));

    // Value outside [0.0, 1.0] is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "a", new DecimalValue( new BigDecimal( "-1.0")))
          .build()),
      is( false));

    // A string value where a decimal is expected is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "a", new StringValue( "0.0"))
          .build()),
      is( false));
  }

  /**
   * Domain with three declared (date-string-named) properties plus 0..4
   * additional properties whose names are dates in [2020-1-1, 2020-1-7].
   */
  @Test
  public void whenManyProperties() {
    // Given...
    ObjectDomain domain = new ObjectDomain();

    // When...
    MapBuilder<String,ValueDomain<?>> properties = new MapBuilder<String,ValueDomain<?>>();
    domain.setPropertyDomains(
      properties
        .put( dateString( 2020, 1, 2), new AsciiStringDomain( 4))
        .put( dateString( 2020, 1, 4), new IntegerDomain( 0, 100))
        .put( dateString( 2020, 1, 6), new DecimalDomain( -1.25, 1.25))
        .build());
    domain.setAdditionalPropertyCount( new IntegerDomain( 0, 4));
    domain.setAdditionalPropertyNames( new DateDomain( date( 2020, 1, 1), date( 2020, 1, 7)));

    // Then...
    verifyContainsValues( domain, 1000);

    // Exactly the declared properties with in-range values is accepted.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( dateString( 2020, 1, 2), new StringValue( "ABCD"))
          .put( dateString( 2020, 1, 4), new IntegerValue( 0))
          .put( dateString( 2020, 1, 6), new DecimalValue( new BigDecimal( "-1.00")))
          .build()),
      is( true));

    // Declared properties plus 4 date-named additional properties is accepted.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( dateString( 2020, 1, 2), new StringValue( "0"))
          .put( dateString( 2020, 1, 4), new IntegerValue( 100))
          .put( dateString( 2020, 1, 6), new DecimalValue( new BigDecimal( "1.25")))
          .put( dateString( 2020, 1, 1), new StringValue( "Howdy!"))
          .put( dateString( 2020, 1, 3), new IntegerValue( -987))
          .put( dateString( 2020, 1, 5), new ObjectValue( emptyMap()))
          .put( dateString( 2020, 1, 7), new DecimalValue( BigDecimal.ZERO))
          .build()),
      is( true));

    // A missing declared property (2020-1-6) is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( dateString( 2020, 1, 2), new StringValue( "0"))
          .put( dateString( 2020, 1, 4), new IntegerValue( 0))
          .build()),
      is( false));

    // A declared property with the wrong value type (string for decimal) is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( dateString( 2020, 1, 2), new StringValue( "ABCD"))
          .put( dateString( 2020, 1, 4), new IntegerValue( 1))
          .put( dateString( 2020, 1, 6), new StringValue( "-1.00"))
          .build()),
      is( false));

    // An additional property whose name is not in the date-name domain is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( dateString( 2020, 1, 2), new StringValue( "ABCD"))
          .put( dateString( 2020, 1, 4), new IntegerValue( 0))
          .put( dateString( 2020, 1, 6), new DecimalValue( new BigDecimal( "0.12")))
          .put( "myString", new StringValue( "?"))
          .build()),
      is( false));
  }

  /**
   * Domain with one declared property and no additional-property configuration.
   */
  @Test
  public void whenNoAdditionalProperties() {
    // Given...
    ObjectDomain domain = new ObjectDomain();

    // When...
    MapBuilder<String,ValueDomain<?>> properties = new MapBuilder<String,ValueDomain<?>>();
    domain.setPropertyDomains( properties.put( "alpha", new AsciiStringDomain( 4)).build());

    // Then...
    verifyContainsValues( domain, 1000);

    // Exactly the declared property is accepted.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "alpha", new StringValue( "ABCD"))
          .build()),
      is( true));

    // The declared property is required.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .build()),
      is( false));

    // An undeclared extra property is rejected.
    assertThat(
      "Contains",
      domain.contains(
        new MapBuilder<String,DataValue<?>>()
          .put( "alpha", new StringValue( "ABCD"))
          .put( "bravo", new IntegerValue( 0))
          .build()),
      is( false));
  }

  /**
   * A property name that violates the domain's character restriction
   * ({@link Characters#TOKEN}) must be rejected at configuration time.
   */
  @Test
  public void whenInvalidPropertyName() {
    // Given...
    ObjectDomain domain = new ObjectDomain( Characters.TOKEN);

    // When...
    Map<String,ValueDomain<?>> properties =
      new MapBuilder<String,ValueDomain<?>>().put( "<id>", new AsciiStringDomain( 4)).build();

    // Then...
    expectFailure( ValueDomainException.class)
      .when( () -> domain.setPropertyDomains( properties));
  }

  /**
   * Returns the given date as a string.
   */
  private String dateString( int year, int month, int day) {
    return FormattedString.getDateFormat().format( date( year, month, day));
  }

  /**
   * Returns the given date.
   * NOTE(review): Calendar.Builder.setDate takes a 0-based month, so month=1 is
   * February; dateString() and date() use the same convention, so the tests stay
   * self-consistent — confirm this is intentional.
   */
  private Date date( int year, int month, int day) {
    return new Calendar.Builder().setDate( year, month, day).build().getTime();
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ui;

import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.fileTypes.FileTypes;
import com.intellij.openapi.fileTypes.INativeFileType;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.vfs.PersistentFSConstants;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.pom.Navigatable;
import com.intellij.util.Alarm;
import com.intellij.util.OpenSourceUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.awt.event.MouseMotionAdapter;

/**
 * Wires "autoscroll to source" behavior onto a {@link JTree}, {@link JTable} or
 * {@link JList}: clicks and selection/drag changes trigger (debounced) navigation
 * to the source of the selected item when autoscroll mode is on. Subclasses
 * supply the mode flag via {@link #isAutoScrollMode()}/{@link #setAutoScrollMode}.
 */
public abstract class AutoScrollToSourceHandler {
  // Debounces selection-driven scrolls; see onSelectionChanged().
  private final Alarm myAutoScrollAlarm = new Alarm();

  /**
   * Installs click, drag and selection listeners on the tree. Single clicks on a
   * tree path trigger navigation when autoscroll mode is on.
   */
  public void install(final JTree tree) {
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        // Only react to single clicks (same semantics as the ">= 2" checks below).
        if (clickCount > 1) return false;

        TreePath location = tree.getPathForLocation(e.getPoint().x, e.getPoint().y);
        if (location != null) {
          onMouseClicked(tree);
          // Consume the click only when autoscroll is actually enabled.
          return isAutoScrollMode();
        }
        return false;
      }
    }.installOn(tree);

    tree.addMouseMotionListener(new MouseMotionAdapter() {
      @Override
      public void mouseDragged(final MouseEvent e) {
        onSelectionChanged(tree);
      }
    });
    tree.addTreeSelectionListener(
      new TreeSelectionListener() {
        @Override
        public void valueChanged(TreeSelectionEvent e) {
          onSelectionChanged(tree);
        }
      }
    );
  }

  /**
   * Installs click, drag and selection listeners on the table; same behavior as
   * the tree variant.
   */
  public void install(final JTable table) {
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        if (clickCount >= 2) return false;

        Component location = table.getComponentAt(e.getPoint());
        if (location != null) {
          onMouseClicked(table);
          return isAutoScrollMode();
        }
        return false;
      }
    }.installOn(table);

    table.addMouseMotionListener(new MouseMotionAdapter() {
      @Override
      public void mouseDragged(final MouseEvent e) {
        onSelectionChanged(table);
      }
    });
    table.getSelectionModel().addListSelectionListener(
      new ListSelectionListener() {
        @Override
        public void valueChanged(ListSelectionEvent e) {
          onSelectionChanged(table);
        }
      }
    );
  }

  /**
   * Installs click and selection listeners on the list.
   */
  public void install(final JList jList) {
    new ClickListener() {
      @Override
      public boolean onClick(@NotNull MouseEvent e, int clickCount) {
        if (clickCount >= 2) return false;

        final Object source = e.getSource();
        final int index = jList.locationToIndex(SwingUtilities.convertPoint(source instanceof Component ? (Component)source : null, e.getPoint(), jList));
        if (index >= 0 && index < jList.getModel().getSize()) {
          onMouseClicked(jList);
          // NOTE(review): unlike the tree/table overloads, this returns true
          // unconditionally rather than isAutoScrollMode() — the click is
          // consumed even when autoscroll is off; confirm whether intentional.
          return true;
        }
        return false;
      }
    }.installOn(jList);

    jList.addListSelectionListener(new ListSelectionListener() {
      @Override
      public void valueChanged(ListSelectionEvent e) {
        onSelectionChanged(jList);
      }
    });
  }

  /** Cancels any pending (debounced) autoscroll request. */
  public void cancelAllRequests(){
    myAutoScrollAlarm.cancelAllRequests();
  }

  /** Immediately schedules navigation for a click (no debounce) when autoscroll is on. */
  public void onMouseClicked(final Component component) {
    cancelAllRequests();
    if (isAutoScrollMode()){
      ApplicationManager.getApplication().invokeLater(() -> scrollToSource(component));
    }
  }

  /**
   * Schedules navigation 500 ms after the last selection change, re-checking
   * visibility (and focus, unless needToCheckFocus() says otherwise) at fire time.
   */
  private void onSelectionChanged(final Component component) {
    if (component != null && component.isShowing() && isAutoScrollMode()) {
      myAutoScrollAlarm.cancelAllRequests();
      myAutoScrollAlarm.addRequest(
        () -> {
          if (component.isShowing()) { //for tests
            if (!needToCheckFocus() || component.hasFocus()) {
              scrollToSource(component);
            }
          }
        }, 500
      );
    }
  }

  /** Whether the component must be focused for a debounced autoscroll to fire. */
  protected boolean needToCheckFocus(){
    return true;
  }

  /** @return whether autoscroll-to-source is currently enabled. */
  protected abstract boolean isAutoScrollMode();

  /** Toggles autoscroll-to-source on or off. */
  protected abstract void setAutoScrollMode(boolean state);

  /**
   * Navigates to the source of the component's selection once its tool window is
   * ready, skipping files that would pop a file-type dialog or are too large.
   */
  protected void scrollToSource(final Component tree) {
    DataContext dataContext=DataManager.getInstance().getDataContext(tree);
    getReady(dataContext).doWhenDone(() -> TransactionGuard.submitTransaction(ApplicationManager.getApplication(), () -> {
      DataContext context = DataManager.getInstance().getDataContext(tree);
      final VirtualFile vFile = CommonDataKeys.VIRTUAL_FILE.getData(context);
      if (vFile != null) {
        // Attempt to navigate to the virtual file with unknown file type will show a modal dialog
        // asking to register some file type for this file. This behaviour is undesirable when autoscrolling.
        if (vFile.getFileType() == FileTypes.UNKNOWN || vFile.getFileType() instanceof INativeFileType) return;
        //IDEA-84881 Don't autoscroll to very large files
        if (vFile.getLength() > PersistentFSConstants.getMaxIntellisenseFileSize()) return;
      }
      Navigatable[] navigatables = CommonDataKeys.NAVIGATABLE_ARRAY.getData(context);
      if (navigatables != null && navigatables.length == 1) {
        OpenSourceUtil.navigateToSource(false, true, navigatables[0]);
      }
    }));
  }

  /** @return a toggle action that flips autoscroll mode (for toolbars/popups). */
  @NotNull
  public ToggleAction createToggleAction() {
    return new AutoscrollToSourceAction();
  }

  /** Toolbar toggle bound to isAutoScrollMode()/setAutoScrollMode(). */
  private class AutoscrollToSourceAction extends ToggleAction implements DumbAware {
    AutoscrollToSourceAction() {
      super(UIBundle.message("autoscroll.to.source.action.name"), UIBundle.message("autoscroll.to.source.action.description"),
            AllIcons.General.AutoscrollToSource);
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent event) {
      return isAutoScrollMode();
    }

    @Override
    public void setSelected(@NotNull AnActionEvent event, boolean flag) {
      setAutoScrollMode(flag);
    }
  }

  // Defers navigation until the hosting tool window reports it is ready.
  private ActionCallback getReady(DataContext context) {
    ToolWindow toolWindow = PlatformDataKeys.TOOL_WINDOW.getData(context);
    return toolWindow != null ? toolWindow.getReady(this) : ActionCallback.DONE;
  }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.application.impl; import com.intellij.ide.plugins.PluginManagerCore; import com.intellij.openapi.application.ApplicationNamesInfo; import com.intellij.openapi.application.PathManager; import com.intellij.openapi.application.ex.ApplicationInfoEx; import com.intellij.openapi.components.NamedComponent; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.*; import com.intellij.openapi.util.text.StringUtil; import com.intellij.ui.JBColor; import com.intellij.util.ArrayUtil; import com.intellij.util.Function; import com.intellij.util.PlatformUtils; import com.intellij.util.containers.ContainerUtil; import org.jdom.Document; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.awt.*; import java.io.File; import java.io.FileNotFoundException; import java.text.MessageFormat; import java.util.*; import java.util.List; import java.util.jar.JarEntry; import java.util.jar.JarFile; public class ApplicationInfoImpl extends ApplicationInfoEx implements JDOMExternalizable, NamedComponent { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.application.impl.ApplicationInfoImpl"); private String myCodeName = null; private String myMajorVersion = null; private String myMinorVersion = null; private String myMicroVersion = null; private 
String myPatchVersion = null; private String myFullVersion = null; private String myBuildNumber = null; private String myApiVersion = null; private String myCompanyName = "JetBrains s.r.o."; private String myCompanyUrl = "https://www.jetbrains.com/"; private Color myProgressColor = null; private Color myCopyrightForeground = JBColor.BLACK; private Color myAboutForeground = JBColor.BLACK; private Color myAboutLinkColor = null; private String myProgressTailIconName = null; private Icon myProgressTailIcon = null; private int myProgressY = 350; private String mySplashImageUrl = null; private String myAboutImageUrl = null; private Color mySplashTextColor = new Color(0, 35, 135); // idea blue private String myIconUrl = "/icon.png"; private String mySmallIconUrl = "/icon_small.png"; private String myBigIconUrl = null; private String myToolWindowIconUrl = "/toolwindows/toolWindowProject.png"; private String myWelcomeScreenLogoUrl = null; private String myEditorBackgroundImageUrl = null; private Calendar myBuildDate = null; private Calendar myMajorReleaseBuildDate = null; private String myPackageCode = null; private boolean myShowLicensee = true; private String myCustomizeIDEWizardStepsProvider; private UpdateUrls myUpdateUrls; private String myDocumentationUrl; private String mySupportUrl; private String myEAPFeedbackUrl; private String myReleaseFeedbackUrl; private String myPluginManagerUrl; private String myPluginsListUrl; private String myPluginsDownloadUrl; private String myBuiltinPluginsUrl; private String myWhatsNewUrl; private String myWinKeymapUrl; private String myMacKeymapUrl; private boolean myEAP; private boolean myHasHelp = true; private boolean myHasContextHelp = true; private String myHelpFileName = "ideahelp.jar"; private String myHelpRootName = "idea"; private String myWebHelpUrl = "https://www.jetbrains.com/idea/webhelp/"; private List<PluginChooserPage> myPluginChooserPages = new ArrayList<PluginChooserPage>(); private String[] myEssentialPluginsIds; 
  // --- usage-statistics endpoints, from the <statistics> element (with hard-coded defaults) ---
  private String myStatisticsSettingsUrl;
  private String myStatisticsServiceUrl;
  private String myStatisticsServiceKey;

  private String myThirdPartySoftwareUrl;
  private String myJetbrainsTvUrl;
  private String myEvalLicenseUrl = "https://www.jetbrains.com/company/useterms.html";

  // Optional clickable rectangle on the about-screen logo (logoX/logoY/logoW/logoH attributes).
  private Rectangle myAboutLogoRect;

  // --- element/attribute names of the XML descriptor parsed by readExternal() ---
  private static final String IDEA_PATH = "/idea/";
  private static final String ELEMENT_VERSION = "version";
  private static final String ATTRIBUTE_MAJOR = "major";
  private static final String ATTRIBUTE_MINOR = "minor";
  private static final String ATTRIBUTE_MICRO = "micro";
  private static final String ATTRIBUTE_PATCH = "patch";
  private static final String ATTRIBUTE_FULL = "full";
  private static final String ATTRIBUTE_CODENAME = "codename";
  private static final String ATTRIBUTE_NAME = "name";
  private static final String ELEMENT_BUILD = "build";
  private static final String ELEMENT_COMPANY = "company";
  private static final String ATTRIBUTE_NUMBER = "number";
  private static final String ATTRIBUTE_API_VERSION = "apiVersion";
  private static final String ATTRIBUTE_DATE = "date";
  private static final String ATTRIBUTE_MAJOR_RELEASE_DATE = "majorReleaseDate";
  private static final String ELEMENT_LOGO = "logo";
  private static final String ATTRIBUTE_URL = "url";
  private static final String ATTRIBUTE_TEXT_COLOR = "textcolor";
  private static final String ATTRIBUTE_PROGRESS_COLOR = "progressColor";
  private static final String ATTRIBUTE_ABOUT_FOREGROUND_COLOR = "foreground";
  private static final String ATTRIBUTE_ABOUT_COPYRIGHT_FOREGROUND_COLOR = "copyrightForeground";
  private static final String ATTRIBUTE_ABOUT_LINK_COLOR = "linkColor";
  private static final String ATTRIBUTE_PROGRESS_Y = "progressY";
  private static final String ATTRIBUTE_PROGRESS_TAIL_ICON = "progressTailIcon";
  private static final String ELEMENT_ABOUT = "about";
  private static final String ELEMENT_ICON = "icon";
  private static final String ATTRIBUTE_SIZE32 = "size32";
  private static final String ATTRIBUTE_SIZE128 = "size128";
  private static final String ATTRIBUTE_SIZE16 = "size16";
  private static final String ATTRIBUTE_SIZE12 = "size12";
  private static final String ELEMENT_PACKAGE = "package";
  private static final String ATTRIBUTE_CODE = "code";
  private static final String ELEMENT_LICENSEE = "licensee";
  private static final String ATTRIBUTE_SHOW = "show";
  private static final String WELCOME_SCREEN_ELEMENT_NAME = "welcome-screen";
  private static final String LOGO_URL_ATTR = "logo-url";
  private static final String ELEMENT_EDITOR = "editor";
  private static final String BACKGROUND_URL_ATTR = "background-url";
  private static final String UPDATE_URLS_ELEMENT_NAME = "update-urls";
  private static final String XML_EXTENSION = ".xml";
  private static final String ATTRIBUTE_EAP = "eap";
  private static final String HELP_ELEMENT_NAME = "help";
  private static final String ATTRIBUTE_HELP_FILE = "file";
  private static final String ATTRIBUTE_HELP_ROOT = "root";
  private static final String PLUGINS_PAGE_ELEMENT_NAME = "plugins-page";
  private static final String ELEMENT_DOCUMENTATION = "documentation";
  private static final String ELEMENT_SUPPORT = "support";
  private static final String ELEMENT_FEEDBACK = "feedback";
  private static final String ATTRIBUTE_RELEASE_URL = "release-url";
  private static final String ATTRIBUTE_EAP_URL = "eap-url";
  private static final String ELEMENT_PLUGINS = "plugins";
  private static final String ATTRIBUTE_LIST_URL = "list-url";
  private static final String ATTRIBUTE_DOWNLOAD_URL = "download-url";
  private static final String ATTRIBUTE_BUILTIN_URL = "builtin-url";
  private static final String ATTRIBUTE_WEBHELP_URL = "webhelp-url";
  private static final String ATTRIBUTE_HAS_HELP = "has-help";
  private static final String ATTRIBUTE_HAS_CONTEXT_HELP = "has-context-help";
  private static final String ELEMENT_WHATSNEW = "whatsnew";
  private static final String ELEMENT_KEYMAP = "keymap";
  private static final String ATTRIBUTE_WINDOWS_URL = "win";
  private static final String ATTRIBUTE_MAC_URL = "mac";
  private static final String ELEMENT_STATISTICS = "statistics";
  private static final String ATTRIBUTE_STATISTICS_SETTINGS = "settings";
  private static final String ATTRIBUTE_STATISTICS_SERVICE = "service";
  private static final String ATTRIBUTE_STATISTICS_SERVICE_KEY = "service-key";
  private static final String ELEMENT_THIRD_PARTY = "third-party";
  private static final String ELEMENT_JB_TV = "jetbrains-tv";
  private static final String CUSTOMIZE_IDE_WIZARD_STEPS = "customize-ide-wizard";
  private static final String STEPS_PROVIDER = "provider";
  private static final String ELEMENT_EVALUATION = "evaluation";
  private static final String ATTRIBUTE_EVAL_LICENSE_URL = "license-url";
  private static final String ESSENTIAL_PLUGIN = "essential-plugin";

  private static final String DEFAULT_PLUGINS_HOST = "http://plugins.jetbrains.com";

  @Override
  public Calendar getBuildDate() {
    return myBuildDate;
  }

  /** Returns the major-release build date, falling back to the build date when not set. */
  @Override
  public Calendar getMajorReleaseBuildDate() {
    return myMajorReleaseBuildDate != null ? myMajorReleaseBuildDate : myBuildDate;
  }

  @Override
  public BuildNumber getBuild() {
    return BuildNumber.fromString(myBuildNumber, getProductPrefix());
  }

  /** Product-code prefix for the build number: "IC" for IDEA Community, "IU" for Ultimate, else null. */
  private static String getProductPrefix() {
    String prefix = null;
    if (PlatformUtils.isIdeaCommunity()) {
      prefix = "IC";
    }
    else if (PlatformUtils.isIdeaUltimate()) {
      prefix = "IU";
    }
    return prefix;
  }

  /** API version string; when no explicit apiVersion attribute was given, the build number is used. */
  @Override
  public String getApiVersion() {
    if (myApiVersion != null) {
      return BuildNumber.fromString(myApiVersion, getBuild().getProductCode()).asString();
    }
    return getBuild().asString();
  }

  @Override
  public String getMajorVersion() {
    return myMajorVersion;
  }

  @Override
  public String getMinorVersion() {
    return myMinorVersion;
  }

  @Override
  public String getMicroVersion() {
    return myMicroVersion;
  }

  @Override
  public String getPatchVersion() {
    return myPatchVersion;
  }

  /**
   * Displayed version. When a "full" MessageFormat pattern is configured it is formatted with
   * {major, minor, micro, patch}; otherwise "major.minor" (minor defaults to "0"),
   * or the product name when no major version is known.
   */
  @Override
  public String getFullVersion() {
    if (myFullVersion == null) {
      if (!StringUtil.isEmptyOrSpaces(myMajorVersion)) {
        if (!StringUtil.isEmptyOrSpaces(myMinorVersion)) {
          return myMajorVersion + "." + myMinorVersion;
        }
        else {
          return myMajorVersion + ".0";
        }
      }
      else {
        return getVersionName();
      }
    }
    else {
      return MessageFormat.format(myFullVersion, myMajorVersion, myMinorVersion, myMicroVersion, myPatchVersion);
    }
  }

  /** Always four dot-separated components; micro/patch default to "0". */
  @Override
  public String getStrictVersion() {
    return myMajorVersion + "." + myMinorVersion + "." + StringUtil.notNullize(myMicroVersion, "0") + "." + StringUtil.notNullize(myPatchVersion, "0");
  }

  /** Product name; for EAP builds the code name is appended in parentheses. */
  @Override
  public String getVersionName() {
    final String fullName = ApplicationNamesInfo.getInstance().getFullProductName();
    if (myEAP && !StringUtil.isEmptyOrSpaces(myCodeName)) {
      return fullName + " (" + myCodeName + ")";
    }
    return fullName;
  }

  /** jar: URL pointing at the help root inside the bundled help jar. */
  @Override
  public String getHelpURL() {
    return "jar:file:///" + getHelpJarPath() + "!/" + myHelpRootName;
  }

  @Override
  public String getCompanyName() {
    return myCompanyName;
  }

  @Override
  public String getCompanyURL() {
    return myCompanyUrl;
  }

  private String getHelpJarPath() {
    return PathManager.getHomePath() + File.separator + "help" + File.separator + myHelpFileName;
  }

  @Override
  public String getSplashImageUrl() {
    return mySplashImageUrl;
  }

  @Override
  public Color getSplashTextColor() {
    return mySplashTextColor;
  }

  @Override
  public String getAboutImageUrl() {
    return myAboutImageUrl;
  }

  public Color getProgressColor() {
    return myProgressColor;
  }

  public Color getCopyrightForeground() {
    return myCopyrightForeground;
  }

  public int getProgressY() {
    return myProgressY;
  }

  /** Lazily loads the progress-tail icon on first access; null when none is configured. */
  @Nullable
  public Icon getProgressTailIcon() {
    if (myProgressTailIcon == null && myProgressTailIconName != null) {
      myProgressTailIcon = IconLoader.getIcon(myProgressTailIconName);
    }
    return myProgressTailIcon;
  }

  @Override
  public String getIconUrl() {
    return myIconUrl;
  }

  @Override
  public String getSmallIconUrl() {
    return mySmallIconUrl;
  }

  @Override
  @Nullable
  public String getBigIconUrl() {
    return myBigIconUrl;
  }

  @Override
  public String getToolWindowIconUrl() {
    return myToolWindowIconUrl;
  }

  @Override
  public String getWelcomeScreenLogoUrl() {
    return myWelcomeScreenLogoUrl;
  }

  @Nullable
  @Override
  public String getCustomizeIDEWizardStepsProvider() {
    return myCustomizeIDEWizardStepsProvider;
  }

  @Override
  public String getEditorBackgroundImageUrl() {
    return myEditorBackgroundImageUrl;
  }

  @Override
  public String getPackageCode() {
    return myPackageCode;
  }

  @Override
  public boolean isEAP() {
    return myEAP;
  }

  @Override
  public UpdateUrls getUpdateUrls() {
    return myUpdateUrls;
  }

  @Override
  public String getDocumentationUrl() {
    return myDocumentationUrl;
  }

  @Override
  public String getSupportUrl() {
    return mySupportUrl;
  }

  @Override
  public String getEAPFeedbackUrl() {
    return myEAPFeedbackUrl;
  }

  @Override
  public String getReleaseFeedbackUrl() {
    return myReleaseFeedbackUrl;
  }

  @Override
  public String getPluginManagerUrl() {
    return myPluginManagerUrl;
  }

  @Override
  public String getPluginsListUrl() {
    return myPluginsListUrl;
  }

  @Override
  public String getPluginsDownloadUrl() {
    return myPluginsDownloadUrl;
  }

  @Override
  public String getBuiltinPluginsUrl() {
    return myBuiltinPluginsUrl;
  }

  @Override
  public String getWebHelpUrl() {
    return myWebHelpUrl;
  }

  @Override
  public boolean hasHelp() {
    return myHasHelp;
  }

  @Override
  public boolean hasContextHelp() {
    return myHasContextHelp;
  }

  @Override
  public String getWhatsNewUrl() {
    return myWhatsNewUrl;
  }

  @Override
  public String getWinKeymapUrl() {
    return myWinKeymapUrl;
  }

  @Override
  public String getMacKeymapUrl() {
    return myMacKeymapUrl;
  }

  @Override
  public Color getAboutForeground() {
    return myAboutForeground;
  }

  public Color getAboutLinkColor() {
    return myAboutLinkColor;
  }

  /**
   * "ProductName version" for releases; "ProductName build-details" for EAP/Beta/RC builds
   * or when no major version is known.
   */
  @Override
  public String getFullApplicationName() {
    StringBuilder buffer = new StringBuilder();
    buffer.append(getVersionName());
    buffer.append(" ");
    if (getMajorVersion() != null && !isEAP() && !isBetaOrRC()) {
      buffer.append(getFullVersion());
    }
    else {
      buffer.append(getBuild().asStringWithAllDetails());
    }
    return buffer.toString();
  }

  @Override
  public boolean showLicenseeInfo() {
    return myShowLicensee;
  }

  public String getStatisticsSettingsUrl() {
    return myStatisticsSettingsUrl;
  }

  public String getStatisticsServiceUrl() {
    return myStatisticsServiceUrl;
  }

  public String getStatisticsServiceKey() {
    return myStatisticsServiceKey;
  }

  @Override
  public String getThirdPartySoftwareURL() {
    return myThirdPartySoftwareUrl;
  }

  @Override
  public String getJetbrainsTvUrl() {
    return myJetbrainsTvUrl;
  }

  @Override
  public String getEvalLicenseUrl() {
    return myEvalLicenseUrl;
  }

  @Override
  public Rectangle getAboutLogoRect() {
    return myAboutLogoRect;
  }

  // Lazily-created singleton used before the application component system is up.
  // NOTE(review): initialization is not synchronized — presumably only called from one
  // thread during startup; confirm before relying on it concurrently.
  private static ApplicationInfoImpl ourShadowInstance;

  /** True when the minor version string marks a pre-release ("RC", "Beta"/"beta"). */
  public boolean isBetaOrRC() {
    String minor = getMinorVersion();
    if (minor != null) {
      if (minor.contains("RC") || minor.contains("Beta") || minor.contains("beta")) {
        return true;
      }
    }
    return false;
  }

  /**
   * Returns the lazily-initialized instance, loading and parsing
   * {@code /idea/<component-name>.xml} from the classpath on first call.
   * Parse failures are logged, leaving a partially-initialized instance.
   */
  public static ApplicationInfoEx getShadowInstance() {
    if (ourShadowInstance == null) {
      ourShadowInstance = new ApplicationInfoImpl();
      try {
        Document doc = JDOMUtil.loadDocument(ApplicationInfoImpl.class, IDEA_PATH + ApplicationNamesInfo.getComponentName() + XML_EXTENSION);
        ourShadowInstance.readExternal(doc.getRootElement());
      }
      catch (FileNotFoundException e) {
        LOG.error("Resource is not in classpath or wrong platform prefix: " + System.getProperty(PlatformUtils.PLATFORM_PREFIX_KEY), e);
      }
      catch (Exception e) {
        LOG.error(e);
      }
    }
    return ourShadowInstance;
  }

  /**
   * Populates every field of this instance from the descriptor's root element.
   * Missing optional elements leave the field defaults in place. Side effects beyond
   * field assignment: publishes the build number to PluginManagerCore.BUILD_NUMBER
   * and appends version/build info to the current thread's name.
   */
  @Override
  public void readExternal(Element parentNode) throws InvalidDataException {
    // <version major minor micro patch full codename eap>
    Element versionElement = parentNode.getChild(ELEMENT_VERSION);
    if (versionElement != null) {
      myMajorVersion = versionElement.getAttributeValue(ATTRIBUTE_MAJOR);
      myMinorVersion = versionElement.getAttributeValue(ATTRIBUTE_MINOR);
      myMicroVersion = versionElement.getAttributeValue(ATTRIBUTE_MICRO);
      myPatchVersion = versionElement.getAttributeValue(ATTRIBUTE_PATCH);
      myFullVersion = versionElement.getAttributeValue(ATTRIBUTE_FULL);
      myCodeName = versionElement.getAttributeValue(ATTRIBUTE_CODENAME);
      myEAP = Boolean.parseBoolean(versionElement.getAttributeValue(ATTRIBUTE_EAP));
    }

    Element companyElement = parentNode.getChild(ELEMENT_COMPANY);
    if (companyElement != null) {
      myCompanyName = companyElement.getAttributeValue(ATTRIBUTE_NAME, myCompanyName);
      myCompanyUrl = companyElement.getAttributeValue(ATTRIBUTE_URL, myCompanyUrl);
    }

    // <build number apiVersion date majorReleaseDate>
    Element buildElement = parentNode.getChild(ELEMENT_BUILD);
    if (buildElement != null) {
      myBuildNumber = buildElement.getAttributeValue(ATTRIBUTE_NUMBER);
      myApiVersion = buildElement.getAttributeValue(ATTRIBUTE_API_VERSION);
      setBuildNumber(myApiVersion, myBuildNumber);

      // NOTE(review): dateString is dereferenced without a null check — an absent
      // "date" attribute would throw NPE here; confirm the descriptor always has it.
      String dateString = buildElement.getAttributeValue(ATTRIBUTE_DATE);
      if (dateString.equals("__BUILD_DATE__")) {
        // Placeholder was not substituted at build time: approximate the build date
        // with the timestamp of the first entry of lib/boot.jar.
        myBuildDate = new GregorianCalendar();
        try {
          final JarFile bootJar = new JarFile(PathManager.getHomePath() + File.separator + "lib" + File.separator + "boot.jar");
          try {
            final JarEntry jarEntry = bootJar.entries().nextElement(); // /META-INF is always updated on build
            myBuildDate.setTime(new Date(jarEntry.getTime()));
          }
          finally {
            bootJar.close();
          }
        }
        catch (Exception ignore) {
          // best effort: fall back to "now" set above
        }
      }
      else {
        myBuildDate = parseDate(dateString);
      }
      String majorReleaseDateString = buildElement.getAttributeValue(ATTRIBUTE_MAJOR_RELEASE_DATE);
      if (majorReleaseDateString != null) {
        myMajorReleaseBuildDate = parseDate(majorReleaseDateString);
      }
    }

    // Tag the current thread's name with version/build info (visible in thread dumps).
    Thread currentThread = Thread.currentThread();
    currentThread.setName(currentThread.getName() + " " + myMajorVersion + "." + myMinorVersion + "#" + myBuildNumber + ", eap:" + myEAP);

    // <logo> — splash screen appearance
    Element logoElement = parentNode.getChild(ELEMENT_LOGO);
    if (logoElement != null) {
      mySplashImageUrl = logoElement.getAttributeValue(ATTRIBUTE_URL);
      mySplashTextColor = parseColor(logoElement.getAttributeValue(ATTRIBUTE_TEXT_COLOR));
      String v = logoElement.getAttributeValue(ATTRIBUTE_PROGRESS_COLOR);
      if (v != null) {
        myProgressColor = parseColor(v);
      }

      v = logoElement.getAttributeValue(ATTRIBUTE_PROGRESS_TAIL_ICON);
      if (v != null) {
        myProgressTailIconName = v;
      }

      v = logoElement.getAttributeValue(ATTRIBUTE_PROGRESS_Y);
      if (v != null) {
        myProgressY = Integer.parseInt(v);
      }
    }

    // <about> — about screen appearance and optional clickable logo rectangle
    Element aboutLogoElement = parentNode.getChild(ELEMENT_ABOUT);
    if (aboutLogoElement != null) {
      myAboutImageUrl = aboutLogoElement.getAttributeValue(ATTRIBUTE_URL);

      String v = aboutLogoElement.getAttributeValue(ATTRIBUTE_ABOUT_FOREGROUND_COLOR);
      if (v != null) {
        myAboutForeground = parseColor(v);
      }
      v = aboutLogoElement.getAttributeValue(ATTRIBUTE_ABOUT_COPYRIGHT_FOREGROUND_COLOR);
      if (v != null) {
        myCopyrightForeground = parseColor(v);
      }
      String c = aboutLogoElement.getAttributeValue(ATTRIBUTE_ABOUT_LINK_COLOR);
      if (c != null) {
        myAboutLinkColor = parseColor(c);
      }

      String logoX = aboutLogoElement.getAttributeValue("logoX");
      String logoY = aboutLogoElement.getAttributeValue("logoY");
      String logoW = aboutLogoElement.getAttributeValue("logoW");
      String logoH = aboutLogoElement.getAttributeValue("logoH");
      if (logoX != null && logoY != null && logoW != null && logoH != null) {
        try {
          myAboutLogoRect = new Rectangle(Integer.parseInt(logoX), Integer.parseInt(logoY), Integer.parseInt(logoW), Integer.parseInt(logoH));
        }
        catch (NumberFormatException nfe) {
          // ignore
        }
      }
    }

    // <icon size32 size16 size128 size12>
    Element iconElement = parentNode.getChild(ELEMENT_ICON);
    if (iconElement != null) {
      myIconUrl = iconElement.getAttributeValue(ATTRIBUTE_SIZE32);
      mySmallIconUrl = iconElement.getAttributeValue(ATTRIBUTE_SIZE16);
      myBigIconUrl = iconElement.getAttributeValue(ATTRIBUTE_SIZE128, (String)null);
      final String toolWindowIcon = iconElement.getAttributeValue(ATTRIBUTE_SIZE12);
      if (toolWindowIcon != null) {
        myToolWindowIconUrl = toolWindowIcon;
      }
    }

    Element packageElement = parentNode.getChild(ELEMENT_PACKAGE);
    if (packageElement != null) {
      myPackageCode = packageElement.getAttributeValue(ATTRIBUTE_CODE);
    }

    Element showLicensee = parentNode.getChild(ELEMENT_LICENSEE);
    if (showLicensee != null) {
      myShowLicensee = Boolean.valueOf(showLicensee.getAttributeValue(ATTRIBUTE_SHOW)).booleanValue();
    }

    Element welcomeScreen = parentNode.getChild(WELCOME_SCREEN_ELEMENT_NAME);
    if (welcomeScreen != null) {
      myWelcomeScreenLogoUrl = welcomeScreen.getAttributeValue(LOGO_URL_ATTR);
    }

    Element wizardSteps = parentNode.getChild(CUSTOMIZE_IDE_WIZARD_STEPS);
    if (wizardSteps != null) {
      myCustomizeIDEWizardStepsProvider = wizardSteps.getAttributeValue(STEPS_PROVIDER);
    }

    Element editor = parentNode.getChild(ELEMENT_EDITOR);
    if (editor != null) {
      myEditorBackgroundImageUrl = editor.getAttributeValue(BACKGROUND_URL_ATTR);
    }

    // <help file root webhelp-url has-help has-context-help>
    Element helpElement = parentNode.getChild(HELP_ELEMENT_NAME);
    if (helpElement != null) {
      myHelpFileName = helpElement.getAttributeValue(ATTRIBUTE_HELP_FILE);
      myHelpRootName = helpElement.getAttributeValue(ATTRIBUTE_HELP_ROOT);
      final String webHelpUrl = helpElement.getAttributeValue(ATTRIBUTE_WEBHELP_URL);
      if (webHelpUrl != null) {
        myWebHelpUrl = webHelpUrl;
      }

      String attValue = helpElement.getAttributeValue(ATTRIBUTE_HAS_HELP);
      myHasHelp = attValue == null || Boolean.parseBoolean(attValue); // Default is true
      attValue = helpElement.getAttributeValue(ATTRIBUTE_HAS_CONTEXT_HELP);
      myHasContextHelp = attValue == null || Boolean.parseBoolean(attValue); // Default is true
    }

    Element updateUrls = parentNode.getChild(UPDATE_URLS_ELEMENT_NAME);
    myUpdateUrls = new UpdateUrlsImpl(updateUrls);

    Element documentationElement = parentNode.getChild(ELEMENT_DOCUMENTATION);
    if (documentationElement != null) {
      myDocumentationUrl = documentationElement.getAttributeValue(ATTRIBUTE_URL);
    }

    Element supportElement = parentNode.getChild(ELEMENT_SUPPORT);
    if (supportElement != null) {
      mySupportUrl = supportElement.getAttributeValue(ATTRIBUTE_URL);
    }

    Element feedbackElement = parentNode.getChild(ELEMENT_FEEDBACK);
    if (feedbackElement != null) {
      myEAPFeedbackUrl = feedbackElement.getAttributeValue(ATTRIBUTE_EAP_URL);
      myReleaseFeedbackUrl = feedbackElement.getAttributeValue(ATTRIBUTE_RELEASE_URL);
    }

    Element whatsnewElement = parentNode.getChild(ELEMENT_WHATSNEW);
    if (whatsnewElement != null) {
      myWhatsNewUrl = whatsnewElement.getAttributeValue(ATTRIBUTE_URL);
    }

    // <plugins url list-url download-url builtin-url> — list/download URLs are derived
    // from the repository host when not given explicitly.
    Element pluginsElement = parentNode.getChild(ELEMENT_PLUGINS);
    if (pluginsElement != null) {
      String url = pluginsElement.getAttributeValue(ATTRIBUTE_URL);
      myPluginManagerUrl = url != null ? url : DEFAULT_PLUGINS_HOST;
      boolean closed = StringUtil.endsWith(myPluginManagerUrl, "/");

      String listUrl = pluginsElement.getAttributeValue(ATTRIBUTE_LIST_URL);
      myPluginsListUrl = listUrl != null ? listUrl : myPluginManagerUrl + (closed ? "" : "/") + "plugins/list/";

      String downloadUrl = pluginsElement.getAttributeValue(ATTRIBUTE_DOWNLOAD_URL);
      myPluginsDownloadUrl = downloadUrl != null ? downloadUrl : myPluginManagerUrl + (closed ? "" : "/") + "pluginManager/";

      if (!getBuild().isSnapshot()) {
        myBuiltinPluginsUrl = pluginsElement.getAttributeValue(ATTRIBUTE_BUILTIN_URL);
      }
    }
    else {
      myPluginManagerUrl = DEFAULT_PLUGINS_HOST;
      myPluginsListUrl = DEFAULT_PLUGINS_HOST + "/plugins/list/";
      myPluginsDownloadUrl = DEFAULT_PLUGINS_HOST + "/pluginManager/";
    }

    // The system property may redirect plugin URLs to a custom repository host.
    final String pluginsHost = System.getProperty("idea.plugins.host");
    if (pluginsHost != null) {
      myPluginsListUrl = myPluginsListUrl.replace(DEFAULT_PLUGINS_HOST, pluginsHost);
      myPluginsDownloadUrl = myPluginsDownloadUrl.replace(DEFAULT_PLUGINS_HOST, pluginsHost);
    }

    Element keymapElement = parentNode.getChild(ELEMENT_KEYMAP);
    if (keymapElement != null) {
      myWinKeymapUrl = keymapElement.getAttributeValue(ATTRIBUTE_WINDOWS_URL);
      myMacKeymapUrl = keymapElement.getAttributeValue(ATTRIBUTE_MAC_URL);
    }

    myPluginChooserPages = new ArrayList<PluginChooserPage>();
    final List children = parentNode.getChildren(PLUGINS_PAGE_ELEMENT_NAME);
    for (Object child : children) {
      myPluginChooserPages.add(new PluginChooserPageImpl((Element)child));
    }

    // <essential-plugin>id</essential-plugin> entries; blank IDs are dropped.
    List<Element> essentialPluginsElements = JDOMUtil.getChildren(parentNode, ESSENTIAL_PLUGIN);
    Collection<String> essentialPluginsIds = ContainerUtil.mapNotNull(essentialPluginsElements, new Function<Element, String>() {
      @Override
      public String fun(Element element) {
        String id = element.getTextTrim();
        return StringUtil.isNotEmpty(id) ? id : null;
      }
    });
    myEssentialPluginsIds = ArrayUtil.toStringArray(essentialPluginsIds);

    Element statisticsElement = parentNode.getChild(ELEMENT_STATISTICS);
    if (statisticsElement != null) {
      myStatisticsSettingsUrl = statisticsElement.getAttributeValue(ATTRIBUTE_STATISTICS_SETTINGS);
      myStatisticsServiceUrl = statisticsElement.getAttributeValue(ATTRIBUTE_STATISTICS_SERVICE);
      myStatisticsServiceKey = statisticsElement.getAttributeValue(ATTRIBUTE_STATISTICS_SERVICE_KEY);
    }
    else {
      myStatisticsSettingsUrl = "https://www.jetbrains.com/idea/statistics/stat-assistant.xml";
      myStatisticsServiceUrl = "https://www.jetbrains.com/idea/statistics/index.jsp";
      myStatisticsServiceKey = null;
    }

    Element thirdPartyElement = parentNode.getChild(ELEMENT_THIRD_PARTY);
    if (thirdPartyElement != null) {
      myThirdPartySoftwareUrl = thirdPartyElement.getAttributeValue(ATTRIBUTE_URL);
    }

    Element tvElement = parentNode.getChild(ELEMENT_JB_TV);
    if (tvElement != null) {
      myJetbrainsTvUrl = tvElement.getAttributeValue(ATTRIBUTE_URL);
    }

    Element evaluationElement = parentNode.getChild(ELEMENT_EVALUATION);
    if (evaluationElement != null) {
      final String url = evaluationElement.getAttributeValue(ATTRIBUTE_EVAL_LICENSE_URL);
      if (url != null && !url.isEmpty()) {
        myEvalLicenseUrl = url;
      }
    }
  }

  /** Publishes the build number for the plugin manager; prefers the API version when present. */
  private static void setBuildNumber(String apiVersion, String buildNumber) {
    PluginManagerCore.BUILD_NUMBER = apiVersion != null ? apiVersion : buildNumber;
  }

  /**
   * Parses "yyyyMMdd" or "yyyyMMddHHmm" into a calendar. Any parse failure is
   * swallowed, so malformed input yields a calendar built from the fields parsed
   * so far (zeros otherwise). The month is converted to Calendar's 0-based form.
   */
  private static GregorianCalendar parseDate(final String dateString) {
    @SuppressWarnings("MultipleVariablesInDeclaration") int year = 0, month = 0, day = 0, hour = 0, minute = 0;
    try {
      year = Integer.parseInt(dateString.substring(0, 4));
      month = Integer.parseInt(dateString.substring(4, 6));
      day = Integer.parseInt(dateString.substring(6, 8));
      if (dateString.length() > 8) {
        hour = Integer.parseInt(dateString.substring(8, 10));
        minute = Integer.parseInt(dateString.substring(10, 12));
      }
    }
    catch (Exception ignore) {
      // best effort: keep whatever fields parsed successfully
    }
    if (month > 0) month--;
    return new GregorianCalendar(year, month, day, hour, minute);
  }

  /**
   * Parses a hex color string ("RRGGBB" or "AARRGGBB"); the alpha channel is honored
   * only when the value exceeds 24 bits.
   */
  @SuppressWarnings("UseJBColor")
  private static Color parseColor(final String colorString) {
    final long rgb = Long.parseLong(colorString, 16);
    return new Color((int)rgb, rgb > 0xffffff);
  }

  /** Serialization back to XML is intentionally unsupported — this component is read-only. */
  @Override
  public void writeExternal(Element element) throws WriteExternalException {
    throw new WriteExternalException();
  }

  @Override
  public List<PluginChooserPage> getPluginChooserPages() {
    return myPluginChooserPages;
  }

  /** The core plugin plus any ID listed via &lt;essential-plugin&gt; counts as essential. */
  @Override
  public boolean isEssentialPlugin(@NotNull String pluginId) {
    return PluginManagerCore.CORE_PLUGIN_ID.equals(pluginId) || ArrayUtil.contains(pluginId, myEssentialPluginsIds);
  }

  @Override
  @NotNull
  public String getComponentName() {
    return ApplicationNamesInfo.getComponentName();
  }

  /** URLs read from the &lt;update-urls&gt; element; both may stay null when the element is absent. */
  private static class UpdateUrlsImpl implements UpdateUrls {
    private String myCheckingUrl;
    private String myPatchesUrl;

    private UpdateUrlsImpl(Element element) {
      if (element != null) {
        myCheckingUrl = element.getAttributeValue("check");
        myPatchesUrl = element.getAttributeValue("patches");
      }
    }

    @Override
    public String getCheckingUrl() {
      return myCheckingUrl;
    }

    @Override
    public String getPatchesUrl() {
      return myPatchesUrl;
    }
  }

  /** One &lt;plugins-page&gt; entry: title, category and an optional plugin dependency. */
  private static class PluginChooserPageImpl implements PluginChooserPage {
    private final String myTitle;
    private final String myCategory;
    private final String myDependentPlugin;

    private PluginChooserPageImpl(Element e) {
      myTitle = e.getAttributeValue("title");
      myCategory = e.getAttributeValue("category");
      myDependentPlugin = e.getAttributeValue("depends");
    }

    @Override
    public String getTitle() {
      return myTitle;
    }

    @Override
    public String getCategory() {
      return myCategory;
    }

    @Override
    public String getDependentPlugin() {
      return myDependentPlugin;
    }
  }

  // Global flag toggled by performance tests; volatile for cross-thread visibility.
  private static volatile boolean myInPerformanceTest;

  public static boolean isInPerformanceTest() {
    return myInPerformanceTest;
  }

  public static void setInPerformanceTest(boolean inPerformanceTest) {
    myInPerformanceTest = inPerformanceTest;
  }
}
/*
 * Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package com.gemstone.gemfire.cache.query.internal.index;

import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionAttributes;
import com.gemstone.gemfire.cache.query.AmbiguousNameException;
import com.gemstone.gemfire.cache.query.FunctionDomainException;
import com.gemstone.gemfire.cache.query.IndexStatistics;
import com.gemstone.gemfire.cache.query.IndexType;
import com.gemstone.gemfire.cache.query.NameResolutionException;
import com.gemstone.gemfire.cache.query.QueryException;
import com.gemstone.gemfire.cache.query.QueryInvocationTargetException;
import com.gemstone.gemfire.cache.query.SelectResults;
import com.gemstone.gemfire.cache.query.TypeMismatchException;
import com.gemstone.gemfire.cache.query.internal.CompiledValue;
import com.gemstone.gemfire.cache.query.internal.ExecutionContext;
import com.gemstone.gemfire.cache.query.internal.MapIndexable;
import com.gemstone.gemfire.cache.query.internal.RuntimeIterator;
import com.gemstone.gemfire.cache.query.types.ObjectType;
import com.gemstone.gemfire.internal.cache.BucketRegion;
import com.gemstone.gemfire.internal.cache.RegionEntry;

/**
 * Index over a map-valued expression: one {@link RangeIndex} is maintained per
 * map key (see {@code mapKeyToValueIndex}), and a reverse map from region entry
 * to the map keys it was indexed under ({@code entryToMapKeysMap}) supports
 * removal of mappings when an entry is destroyed.
 */
public class MapRangeIndex extends AbstractIndex {
  // Reverse map: region entry -> map key(s) this entry contributed to; used by removeMapping().
  private final RegionEntryToValuesMap entryToMapKeysMap;

  // True when the index was declared over all map keys (e.g. "map[*]") rather than a fixed key list.
  final private boolean isAllKeys;

  final String[] patternStr;

  // One RangeIndex per map key; the per-key index does the actual range lookups.
  private final Map<Object, RangeIndex> mapKeyToValueIndex;

  // The specific map keys this index covers (unused when isAllKeys is true).
  private final Object[] mapKeys;

  MapRangeIndex(String indexName, Region region, String fromClause,
      String indexedExpression, String projectionAttributes, String origFromClause,
      String origIndxExpr, String[] defintions, boolean isAllKeys,
      String[] multiIndexingKeysPattern, Object[] mapKeys) {
    super(indexName, region, fromClause, indexedExpression, projectionAttributes,
        origFromClause, origIndxExpr, defintions, null);
    this.mapKeyToValueIndex = new ConcurrentHashMap<Object, RangeIndex>(2, 0.75f, 1);
    RegionAttributes ra = region.getAttributes();
    // Reverse-map capacity mirrors the region's own concurrency settings.
    this.entryToMapKeysMap = new RegionEntryToValuesMap(
        new java.util.concurrent.ConcurrentHashMap(ra.getInitialCapacity(), ra.getLoadFactor(), ra.getConcurrencyLevel()),
        true /* user target list as the map keys will be unique*/);
    this.isAllKeys = isAllKeys;
    this.mapKeys = mapKeys;
    if (this.isAllKeys) {
      // NOTE(review): deleteCharAt(length - 2) appears to strip the wildcard character
      // from an expression ending in "[*]" — confirm the expected expression shape.
      this.patternStr = new String[] { new StringBuilder(indexedExpression)
          .deleteCharAt(indexedExpression.length() - 2).toString() };
    }
    else {
      this.patternStr = multiIndexingKeysPattern;
    }
  }

  /** Evaluates the entry, then flushes the saved mappings into the per-key range indexes. */
  @Override
  void addMapping(RegionEntry entry) throws IMQException {
    this.evaluator.evaluate(entry, true);
    addSavedMappings(entry);
    clearCurrState();
  }

  /** Propagates the mappings buffered during evaluation to every per-key RangeIndex. */
  private void addSavedMappings(RegionEntry entry) throws IMQException {
    for (Object rangeInd : this.mapKeyToValueIndex.values()) {
      ((RangeIndex)rangeInd).addSavedMappings(entry);
    }
  }

  // Statistics are tracked by the per-key RangeIndexes, so this wrapper's stats are a no-op shell.
  @Override
  protected InternalIndexStatistics createStats(String indexName) {
    return new InternalIndexStatistics() {
    };
  }

  @Override
  public ObjectType getResultSetType() {
    return this.evaluator.getIndexResultSetType();
  }

  @Override
  void instantiateEvaluator(IndexCreationHelper ich) {
    this.evaluator = new IMQEvaluator(ich);
  }

  @Override
  public void initializeIndex() throws IMQException {
    evaluator.initializeIndex();
  }
@Override void lockedQuery(Object key, int operator, Collection results, CompiledValue iterOps, RuntimeIterator runtimeItr, ExecutionContext context, List projAttrib, SelectResults intermediateResults, boolean isIntersection) throws TypeMismatchException, FunctionDomainException, NameResolutionException, QueryInvocationTargetException { Object[] mapKeyAndVal = (Object[])key; RangeIndex ri = this.mapKeyToValueIndex.get(mapKeyAndVal[1]); if (ri != null) { long start = System.nanoTime(); ri.internalIndexStats.incUsesInProgress(1); ri.lockedQuery(mapKeyAndVal[0], operator, results, iterOps, runtimeItr, context, projAttrib, intermediateResults, isIntersection); ri.internalIndexStats.incNumUses(); ri.internalIndexStats.incUsesInProgress(-1); ri.internalIndexStats.incUseTime(System.nanoTime() - start); } } @Override void lockedQuery(Object lowerBoundKey, int lowerBoundOperator, Object upperBoundKey, int upperBoundOperator, Collection results, Set keysToRemove, ExecutionContext context) throws TypeMismatchException, FunctionDomainException, NameResolutionException, QueryInvocationTargetException { throw new UnsupportedOperationException( "Range grouping for MapIndex condition is not supported"); } @Override void lockedQuery(Object key, int operator, Collection results, Set keysToRemove, ExecutionContext context) throws TypeMismatchException, FunctionDomainException, NameResolutionException, QueryInvocationTargetException { Object[] mapKeyAndVal = (Object[])key; RangeIndex ri = this.mapKeyToValueIndex.get(mapKeyAndVal[1]); if (ri != null) { long start = System.nanoTime(); ri.internalIndexStats.incUsesInProgress(1); ri.lockedQuery(mapKeyAndVal[0], operator, results, keysToRemove, context); ri.internalIndexStats.incNumUses(); ri.internalIndexStats.incUsesInProgress(-1); ri.internalIndexStats.incUseTime(System.nanoTime() - start); } } @Override void recreateIndexData() throws IMQException { /* * Asif : Mark the data maps to null & call the initialization code of index */ // 
TODO:Asif : The statistics data needs to be modified appropriately // for the clear operation this.mapKeyToValueIndex.clear(); this.entryToMapKeysMap.clear(); this.initializeIndex(); } @Override void removeMapping(RegionEntry entry, int opCode) throws IMQException { // this implementation has a reverse map, so it doesn't handle // BEFORE_UPDATE_OP if (opCode == BEFORE_UPDATE_OP) { return; } Object values = this.entryToMapKeysMap.remove(entry); //Values in reverse coould be null if map in region value does not //contain any key which matches to index expression keys. if (values == null ) { return; } if (values instanceof Collection) { Iterator valuesIter = ((Collection)values).iterator(); while (valuesIter.hasNext()) { Object key = valuesIter.next(); RangeIndex ri = (RangeIndex)this.mapKeyToValueIndex.get(key); long start = System.nanoTime(); ri.internalIndexStats.incUpdatesInProgress(1); ri.removeMapping(entry, opCode); ri.internalIndexStats.incUpdatesInProgress(-1); ri.internalIndexStats.incUpdateTime(System.nanoTime() - start); } } else { RangeIndex ri = (RangeIndex)this.mapKeyToValueIndex.get(values); long start = System.nanoTime(); ri.internalIndexStats.incUpdatesInProgress(1); ri.removeMapping(entry, opCode); ri.internalIndexStats.incUpdatesInProgress(-1); ri.internalIndexStats.incUpdateTime(System.nanoTime() - start); } } public boolean clear() throws QueryException { throw new UnsupportedOperationException( "MapType Index method not supported"); } public void clearCurrState() { for (Object rangeInd : this.mapKeyToValueIndex.values()) { ((RangeIndex)rangeInd).clearCurrState(); } } public int getSizeEstimate(Object key, int op, int matchLevel) throws TypeMismatchException { Object[] mapKeyAndVal = (Object[])key; Object mapKey = mapKeyAndVal[1]; RangeIndex ri = this.mapKeyToValueIndex.get(mapKey); if (ri != null) { return ri.getSizeEstimate(mapKeyAndVal[0], op, matchLevel); } else { return 0; } } @Override protected boolean isCompactRangeIndex() { return false; 
} public IndexType getType() { return IndexType.FUNCTIONAL; } @Override public boolean isMapType() { return true; } @Override void addMapping(Object key, Object value, RegionEntry entry) throws IMQException { assert key instanceof Map; if (this.isAllKeys) { Iterator<Map.Entry<?, ?>> entries = ((Map)key).entrySet().iterator(); while (entries.hasNext()) { Map.Entry<?, ?> mapEntry = entries.next(); Object mapKey = mapEntry.getKey(); Object indexKey = mapEntry.getValue(); this.doIndexAddition(mapKey, indexKey, value, entry); } } else { for (Object mapKey : mapKeys) { Object indexKey = ((Map)key).get(mapKey); if (indexKey != null) { this.doIndexAddition(mapKey, indexKey, value, entry); } } } } @Override void saveMapping(Object key, Object value, RegionEntry entry) throws IMQException { assert key instanceof Map; if (this.isAllKeys) { Iterator<Map.Entry<?, ?>> entries = ((Map)key).entrySet().iterator(); while (entries.hasNext()) { Map.Entry<?, ?> mapEntry = entries.next(); Object mapKey = mapEntry.getKey(); Object indexKey = mapEntry.getValue(); this.saveIndexAddition(mapKey, indexKey, value, entry); } } else { for (Object mapKey : mapKeys) { Object indexKey = ((Map)key).get(mapKey); if (indexKey != null) { this.saveIndexAddition(mapKey, indexKey, value, entry); } } } } private void doIndexAddition(Object mapKey, Object indexKey, Object value, RegionEntry entry) throws IMQException { // Get RangeIndex for it or create it if absent RangeIndex rg = this.mapKeyToValueIndex.get(mapKey); if (rg == null) { IndexStatistics stats = null; PartitionedIndex prIndex = null; if (this.region instanceof BucketRegion) { prIndex = (PartitionedIndex) this.getPRIndex(); stats = prIndex.getStatistics(indexName+"-"+mapKey); } rg = new RangeIndex(indexName+"-"+mapKey, region, fromClause, indexedExpression, projectionAttributes, this.originalFromClause, this.originalIndexedExpression, this.canonicalizedDefinitions, stats); //Shobhit: We need evaluator to verify RegionEntry and IndexEntry 
inconsistency. rg.evaluator = this.evaluator; this.mapKeyToValueIndex.put(mapKey, rg); } rg.internalIndexStats.incUpdatesInProgress(1); long start = System.nanoTime(); rg.addMapping(indexKey, value, entry); //This call is skipped when addMapping is called from MapRangeIndex //rg.internalIndexStats.incNumUpdates(); rg.internalIndexStats.incUpdatesInProgress(-1); rg.internalIndexStats.incUpdateTime(System.nanoTime() - start); this.entryToMapKeysMap.add(entry, mapKey); } private void saveIndexAddition(Object mapKey, Object indexKey, Object value, RegionEntry entry) throws IMQException { // Get RangeIndex for it or create it if absent RangeIndex rg = this.mapKeyToValueIndex.get(mapKey); if (rg == null) { IndexStatistics stats = null; PartitionedIndex prIndex = null; if (this.region instanceof BucketRegion) { prIndex = (PartitionedIndex) this.getPRIndex(); stats = prIndex.getStatistics(indexName+"-"+mapKey); } rg = new RangeIndex(indexName+"-"+mapKey, region, fromClause, indexedExpression, projectionAttributes, this.originalFromClause, this.originalIndexedExpression, this.canonicalizedDefinitions, stats); rg.evaluator = this.evaluator; this.mapKeyToValueIndex.put(mapKey, rg); } rg.internalIndexStats.incUpdatesInProgress(1); long start = System.nanoTime(); rg.saveMapping(indexKey, value, entry); //This call is skipped when addMapping is called from MapRangeIndex //rg.internalIndexStats.incNumUpdates(); rg.internalIndexStats.incUpdatesInProgress(-1); rg.internalIndexStats.incUpdateTime(System.nanoTime() - start); this.entryToMapKeysMap.add(entry, mapKey); } public Map<Object, RangeIndex> getRangeIndexHolderForTesting() { return Collections.unmodifiableMap(this.mapKeyToValueIndex); } public String[] getPatternsForTesting() { return this.patternStr; } public Object[] getMapKeysForTesting() { return this.mapKeys; } @Override public boolean containsEntry(RegionEntry entry) { // TODO:Asif: take care of null mapped entries /* * return (this.entryToValuesMap.containsEntry(entry) 
|| * this.nullMappedEntries.containsEntry(entry) || * this.undefinedMappedEntries .containsEntry(entry)); */ return this.entryToMapKeysMap.containsEntry(entry); } @Override public boolean isMatchingWithIndexExpression(CompiledValue condnExpr, String conditionExprStr, ExecutionContext context) throws AmbiguousNameException, TypeMismatchException, NameResolutionException { if (this.isAllKeys) { // check if the conditionExps is of type MapIndexable.If yes then check // the canonicalized string // stripped of the index arg & see if it matches. if (condnExpr instanceof MapIndexable) { MapIndexable mi = (MapIndexable)condnExpr; CompiledValue recvr = mi.getRecieverSansIndexArgs(); StringBuffer sb = new StringBuffer(); recvr.generateCanonicalizedExpression(sb, context); sb.append('[').append(']'); return sb.toString().equals(this.patternStr[0]); } else { return false; } } else { for (String expr : this.patternStr) { if (expr.equals(conditionExprStr)) { return true; } } return false; } } @Override public boolean isEmpty() { return mapKeyToValueIndex.size() == 0 ? true : false; } }
/* * Copyright 2010-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jetbrains.kotlin.js.translate.context; import k2php.*; import com.intellij.psi.PsiElement; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.kotlin.descriptors.*; import org.jetbrains.kotlin.descriptors.annotations.Annotations; import org.jetbrains.kotlin.descriptors.impl.AnonymousFunctionDescriptor; import org.jetbrains.kotlin.descriptors.impl.LocalVariableDescriptor; import org.jetbrains.kotlin.descriptors.impl.TypeAliasConstructorDescriptor; import org.jetbrains.kotlin.incremental.components.NoLookupLocation; import org.jetbrains.kotlin.js.backend.ast.*; import org.jetbrains.kotlin.js.backend.ast.metadata.MetadataProperties; import org.jetbrains.kotlin.js.config.JsConfig; import org.jetbrains.kotlin.js.translate.intrinsic.Intrinsics; import org.jetbrains.kotlin.js.translate.reference.ReferenceTranslator; import org.jetbrains.kotlin.js.translate.utils.JsAstUtils; import org.jetbrains.kotlin.js.translate.utils.TranslationUtils; import org.jetbrains.kotlin.name.FqName; import org.jetbrains.kotlin.name.Name; import org.jetbrains.kotlin.psi.KtExpression; import org.jetbrains.kotlin.resolve.BindingContext; import org.jetbrains.kotlin.resolve.BindingTrace; import org.jetbrains.kotlin.resolve.DescriptorUtils; import org.jetbrains.kotlin.resolve.descriptorUtil.DescriptorUtilsKt; import 
org.jetbrains.kotlin.resolve.scopes.receivers.ExtensionReceiver; import org.jetbrains.kotlin.types.KotlinType; import java.util.*; import static k2php.Php_astKt.phpThisRef; import static org.jetbrains.kotlin.js.descriptorUtils.DescriptorUtilsKt.isCoroutineLambda; import static org.jetbrains.kotlin.js.translate.context.UsageTrackerKt.getNameForCapturedDescriptor; import static org.jetbrains.kotlin.js.translate.utils.AnnotationsUtils.isLibraryObject; import static org.jetbrains.kotlin.js.translate.utils.AnnotationsUtils.isNativeObject; import static org.jetbrains.kotlin.js.translate.utils.BindingUtils.getDescriptorForElement; /** * All the info about the state of the translation process. */ public class TranslationContext { @NotNull private final DynamicContext dynamicContext; @NotNull private final StaticContext staticContext; @NotNull private final AliasingContext aliasingContext; @Nullable private final UsageTracker usageTracker; @Nullable private final TranslationContext parent; @Nullable private final DeclarationDescriptor declarationDescriptor; @Nullable private final ClassDescriptor classDescriptor; @Nullable private final VariableDescriptor continuationParameterDescriptor; @NotNull public static TranslationContext rootContext(@NotNull StaticContext staticContext, @NotNull JsFunction rootFunction) { JsBlock block = new JsBlock(staticContext.getTopLevelStatements()); DynamicContext rootDynamicContext = DynamicContext.rootContext(rootFunction.getScope(), block); AliasingContext rootAliasingContext = AliasingContext.getCleanContext(); return new TranslationContext(null, staticContext, rootDynamicContext, rootAliasingContext, null, null); } private final Map<JsExpression, TemporaryConstVariable> expressionToTempConstVariableCache = new HashMap<JsExpression, TemporaryConstVariable>(); private TranslationContext( @Nullable TranslationContext parent, @NotNull StaticContext staticContext, @NotNull DynamicContext dynamicContext, @NotNull AliasingContext 
aliasingContext, @Nullable UsageTracker usageTracker, @Nullable DeclarationDescriptor declarationDescriptor ) { this.parent = parent; this.dynamicContext = dynamicContext; this.staticContext = staticContext; this.aliasingContext = aliasingContext; this.usageTracker = usageTracker; this.declarationDescriptor = declarationDescriptor; if (declarationDescriptor instanceof ClassDescriptor) { this.classDescriptor = (ClassDescriptor) declarationDescriptor; } else { this.classDescriptor = parent != null ? parent.classDescriptor : null; } continuationParameterDescriptor = calculateContinuationParameter(); } private VariableDescriptor calculateContinuationParameter() { if (parent != null && parent.declarationDescriptor == declarationDescriptor) { return parent.continuationParameterDescriptor; } if (declarationDescriptor instanceof FunctionDescriptor) { FunctionDescriptor function = (FunctionDescriptor) declarationDescriptor; if (function.isSuspend() && !(function instanceof AnonymousFunctionDescriptor)) { ClassDescriptor continuationDescriptor = DescriptorUtilKt.findContinuationClassDescriptor(getCurrentModule(), NoLookupLocation.FROM_BACKEND); return new LocalVariableDescriptor( declarationDescriptor, Annotations.Companion.getEMPTY(), Name.identifier("continuation"), continuationDescriptor.getDefaultType(), /* mutable = */ false, /* delegated = */ false, SourceElement.NO_SOURCE); } } return null; } @NotNull public Collection<StaticContext.ImportedModule> getImportedModules() { return staticContext.getImportedModules(); } @Nullable public UsageTracker usageTracker() { return usageTracker; } @NotNull public DynamicContext dynamicContext() { return dynamicContext; } @NotNull public TranslationContext contextWithScope(@NotNull JsFunction fun) { return this.newFunctionBody(fun, aliasingContext, declarationDescriptor); } @NotNull private TranslationContext newFunctionBody( @NotNull JsFunction fun, @Nullable AliasingContext aliasingContext, DeclarationDescriptor descriptor ) { 
DynamicContext dynamicContext = DynamicContext.newContext(fun.getScope(), fun.getBody()); if (aliasingContext == null) { aliasingContext = this.aliasingContext.inner(); } return new TranslationContext(this, this.staticContext, dynamicContext, aliasingContext, this.usageTracker, descriptor); } @NotNull public TranslationContext newFunctionBodyWithUsageTracker(@NotNull JsFunction fun, @NotNull MemberDescriptor descriptor) { DynamicContext dynamicContext = DynamicContext.newContext(fun.getScope(), fun.getBody()); UsageTracker usageTracker = new UsageTracker(this.usageTracker, descriptor, fun.getScope()); return new TranslationContext(this, this.staticContext, dynamicContext, this.aliasingContext.inner(), usageTracker, descriptor); } @NotNull public TranslationContext innerWithUsageTracker(@NotNull JsScope scope, @NotNull MemberDescriptor descriptor) { UsageTracker usageTracker = new UsageTracker(this.usageTracker, descriptor, scope); return new TranslationContext(this, staticContext, dynamicContext, aliasingContext.inner(), usageTracker, descriptor); } @NotNull public TranslationContext inner(@NotNull MemberDescriptor descriptor) { return new TranslationContext(this, staticContext, dynamicContext, aliasingContext.inner(), usageTracker, descriptor); } @NotNull public TranslationContext innerBlock(@NotNull JsBlock block) { return new TranslationContext(this, staticContext, dynamicContext.innerBlock(block), aliasingContext, usageTracker, this.declarationDescriptor); } @NotNull public TranslationContext innerBlock() { return innerBlock(new JsBlock()); } @NotNull public TranslationContext newDeclaration(@NotNull DeclarationDescriptor descriptor) { JsBlock innerBlock = getBlockForDescriptor(descriptor); if (innerBlock == null) { innerBlock = dynamicContext.jsBlock(); } DynamicContext dynamicContext = DynamicContext.newContext(getScopeForDescriptor(descriptor), innerBlock); return new TranslationContext(this, staticContext, dynamicContext, aliasingContext, usageTracker, 
descriptor); } @NotNull private TranslationContext innerWithAliasingContext(AliasingContext aliasingContext) { return new TranslationContext(this, staticContext, dynamicContext, aliasingContext, usageTracker, declarationDescriptor); } @NotNull public TranslationContext innerContextWithAliased(@NotNull DeclarationDescriptor correspondingDescriptor, @NotNull JsExpression alias) { return this.innerWithAliasingContext(aliasingContext.inner(correspondingDescriptor, alias)); } @NotNull public TranslationContext innerContextWithAliasesForExpressions(@NotNull Map<KtExpression, JsExpression> aliases) { if (aliases.isEmpty()) return this; return this.innerWithAliasingContext(aliasingContext.withExpressionsAliased(aliases)); } @NotNull public TranslationContext innerContextWithDescriptorsAliased(@NotNull Map<DeclarationDescriptor, JsExpression> aliases) { if (aliases.isEmpty()) return this; return this.innerWithAliasingContext(aliasingContext.withDescriptorsAliased(aliases)); } @Nullable private JsBlock getBlockForDescriptor(@NotNull DeclarationDescriptor descriptor) { if (descriptor instanceof CallableDescriptor) { return getFunctionObject((CallableDescriptor) descriptor).getBody(); } else { return null; } } @Nullable public ClassDescriptor getClassDescriptor() { return classDescriptor; } @NotNull public BindingContext bindingContext() { return staticContext.getBindingContext(); } @NotNull public BindingTrace bindingTrace() { return staticContext.getBindingTrace(); } @NotNull public JsScope getScopeForDescriptor(@NotNull DeclarationDescriptor descriptor) { return staticContext.getScopeForDescriptor(descriptor); } @NotNull public JsName getNameForElement(@NotNull PsiElement element) { DeclarationDescriptor descriptor = getDescriptorForElement(bindingContext(), element); return getNameForDescriptor(descriptor); } @NotNull public JsName getNameForDescriptor(@NotNull DeclarationDescriptor descriptor) { return staticContext.getNameForDescriptor(descriptor); } @NotNull public 
JsName getInnerNameForDescriptor(@NotNull DeclarationDescriptor descriptor) { return staticContext.getInnerNameForDescriptor(descriptor); } @NotNull public JsName getNameForObjectInstance(@NotNull ClassDescriptor descriptor) { return staticContext.getNameForObjectInstance(descriptor); } @NotNull public JsExpression getQualifiedReference(@NotNull DeclarationDescriptor descriptor) { if (descriptor instanceof MemberDescriptor && isFromCurrentModule(descriptor) && isPublicInlineFunction()) { staticContext.export((MemberDescriptor) descriptor, true); } return staticContext.getQualifiedReference(descriptor); } @NotNull public JsExpression getInnerReference(@NotNull DeclarationDescriptor descriptor) { if (isNativeObject(descriptor) || isLibraryObject(descriptor)) { return getQualifiedReference(descriptor); } if (descriptor instanceof VariableDescriptor) { VariableDescriptor variableDescriptor = (VariableDescriptor) descriptor; DeclarationDescriptor containingDeclaration = descriptor.getContainingDeclaration(); if (containingDeclaration instanceof PackageFragmentDescriptor) { PackageFragmentDescriptor packageFragmentDescriptor = (PackageFragmentDescriptor) containingDeclaration; FqName fqName = packageFragmentDescriptor.getFqName(); if (fqName.isRoot()) { JsExpression globalRef = new PHPGlobalVarRef(getInnerNameForDescriptor(descriptor)); ShebangKt.setKotlinType(globalRef, variableDescriptor.getType()); return globalRef; } } JsExpression fuck = JsAstUtils.purePHPNameRefFqn(getInnerNameForDescriptor(descriptor), null);; return fuck; } else { return JsAstUtils.pureFqn(getInnerNameForDescriptor(descriptor), null); } } @NotNull public JsName getNameForBackingField(@NotNull PropertyDescriptor property) { return staticContext.getNameForBackingField(property); } @NotNull public TemporaryVariable declareTemporary(@Nullable JsExpression initExpression) { return dynamicContext.declareTemporary(initExpression); } @NotNull public JsExpression defineTemporary(@NotNull JsExpression 
initExpression) { TemporaryVariable var = dynamicContext.declareTemporary(initExpression); addStatementToCurrentBlock(var.assignmentStatement()); JsNameRef res = var.reference(); return res; } @NotNull public JsExpression cacheExpressionIfNeeded(@NotNull JsExpression expression) { return TranslationUtils.isCacheNeeded(expression) ? defineTemporary(expression) : expression; } @NotNull public TemporaryConstVariable getOrDeclareTemporaryConstVariable(@NotNull JsExpression expression) { TemporaryConstVariable tempVar = expressionToTempConstVariableCache.get(expression); if (tempVar == null) { TemporaryVariable tmpVar = declareTemporary(expression); tempVar = new TemporaryConstVariable(tmpVar.name(), tmpVar.assignmentExpression()); expressionToTempConstVariableCache.put(expression, tempVar); expressionToTempConstVariableCache.put(tmpVar.assignmentExpression(), tempVar); } return tempVar; } public void associateExpressionToLazyValue(JsExpression expression, TemporaryConstVariable temporaryConstVariable) { assert expression == temporaryConstVariable.assignmentExpression(); expressionToTempConstVariableCache.put(expression, temporaryConstVariable); } @NotNull public Namer namer() { return staticContext.getNamer(); } @NotNull public Intrinsics intrinsics() { return staticContext.getIntrinsics(); } @NotNull public JsProgram program() { return staticContext.getProgram(); } @NotNull public JsConfig getConfig() { return staticContext.getConfig(); } @NotNull public JsScope scope() { return dynamicContext.getScope(); } @NotNull public AliasingContext aliasingContext() { return aliasingContext; } @NotNull public JsFunction getFunctionObject(@NotNull CallableDescriptor descriptor) { return staticContext.getFunctionWithScope(descriptor); } public void addStatementToCurrentBlock(@NotNull JsStatement statement) { dynamicContext.jsBlock().getStatements().add(statement); } public void addStatementsToCurrentBlock(@NotNull Collection<JsStatement> statements) { 
dynamicContext.jsBlock().getStatements().addAll(statements); } public void addStatementsToCurrentBlockFrom(@NotNull TranslationContext context) { addStatementsToCurrentBlockFrom(context.dynamicContext().jsBlock()); } public void addStatementsToCurrentBlockFrom(@NotNull JsBlock block) { dynamicContext.jsBlock().getStatements().addAll(block.getStatements()); } public boolean currentBlockIsEmpty() { return dynamicContext.jsBlock().isEmpty(); } public void moveVarsFrom(@NotNull TranslationContext context) { dynamicContext.moveVarsFrom(context.dynamicContext()); } @NotNull public JsBlock getCurrentBlock() { return dynamicContext.jsBlock(); } @Nullable public JsExpression getAliasForDescriptor(@NotNull DeclarationDescriptor descriptor) { JsExpression nameRef = captureIfNeedAndGetCapturedName(descriptor); if (nameRef != null) { return nameRef; } return aliasingContext.getAliasForDescriptor(descriptor); } @NotNull public JsExpression getDispatchReceiver(@NotNull ReceiverParameterDescriptor descriptor) { JsExpression alias = getAliasForDescriptor(descriptor); if (alias != null) { return alias; } if (isCoroutineLambda(descriptor.getContainingDeclaration())) { JsNameRef result = new JsNameRef("$$controller$$", JsAstUtils.stateMachineReceiver()); MetadataProperties.setCoroutineController(result, true); return result; } if (DescriptorUtils.isObject(descriptor.getContainingDeclaration())) { if (isConstructorOrDirectScope(descriptor.getContainingDeclaration())) { return phpThisRef(); } else { ClassDescriptor objectDescriptor = (ClassDescriptor) descriptor.getContainingDeclaration(); return ReferenceTranslator.translateAsValueReference(objectDescriptor, this); } } if (descriptor.getValue() instanceof ExtensionReceiver) return phpThisRef(); ClassifierDescriptor classifier = descriptor.getValue().getType().getConstructor().getDeclarationDescriptor(); // TODO: can't tell why this assertion is valid, revisit this code later assert classifier instanceof ClassDescriptor; ClassDescriptor 
cls = (ClassDescriptor) classifier; assert classDescriptor != null : "Can't get ReceiverParameterDescriptor in top level"; JsExpression receiver = getAliasForDescriptor(classDescriptor.getThisAsReceiverParameter()); if (receiver == null) { receiver = phpThisRef(); } return getDispatchReceiverPath(cls, receiver); } private boolean isConstructorOrDirectScope(DeclarationDescriptor descriptor) { return descriptor == DescriptorUtils.getParentOfType(declarationDescriptor, ClassDescriptor.class, false); } @NotNull private JsExpression getDispatchReceiverPath(@Nullable ClassDescriptor cls, JsExpression thisExpression) { if (cls != null) { JsExpression alias = getAliasForDescriptor(cls); if (alias != null) { return alias; } } if (classDescriptor == cls || parent == null) { return thisExpression; } if (classDescriptor != parent.classDescriptor) { return new JsNameRef(Namer.OUTER_FIELD_NAME, parent.getDispatchReceiverPath(cls, thisExpression)); } else { return parent.getDispatchReceiverPath(cls, thisExpression); } } @Nullable private JsExpression captureIfNeedAndGetCapturedName(DeclarationDescriptor descriptor) { if (usageTracker != null) { usageTracker.used(descriptor); JsName name = getNameForCapturedDescriptor(usageTracker, descriptor); if (name != null) { JsExpression result; if (shouldCaptureViaThis()) { result = phpThisRef(); int depth = getOuterLocalClassDepth(); for (int i = 0; i < depth; ++i) { result = new JsNameRef(Namer.OUTER_FIELD_NAME, result); } result = new JsNameRef(name, result); } else { result = name.makeRef(); } // @fucking if (result instanceof JsNameRef) { JsNameRef jsNameRef = (JsNameRef) result; jsNameRef.setKind(PHPNameRefKind.VAR); } return result; } } return null; } private int getOuterLocalClassDepth() { if (usageTracker == null) return 0; MemberDescriptor capturingDescriptor = usageTracker.getContainingDescriptor(); if (!(capturingDescriptor instanceof ClassDescriptor)) return 0; ClassDescriptor capturingClassDescriptor = (ClassDescriptor) 
capturingDescriptor; ClassDescriptor currentDescriptor = classDescriptor; if (currentDescriptor == null) return 0; int depth = 0; while (currentDescriptor != capturingClassDescriptor) { DeclarationDescriptor container = currentDescriptor.getContainingDeclaration(); if (!(container instanceof ClassDescriptor)) return 0; currentDescriptor = (ClassDescriptor) container; depth++; } return depth; } private boolean shouldCaptureViaThis() { if (declarationDescriptor == null) return false; if (DescriptorUtils.isDescriptorWithLocalVisibility(declarationDescriptor)) return false; if (declarationDescriptor instanceof ConstructorDescriptor && DescriptorUtils.isDescriptorWithLocalVisibility(declarationDescriptor.getContainingDeclaration())) return false; return true; } @Nullable public DeclarationDescriptor getDeclarationDescriptor() { return declarationDescriptor; } public void putClassOrConstructorClosure(@NotNull MemberDescriptor descriptor, @NotNull List<DeclarationDescriptor> closure) { staticContext.putClassOrConstructorClosure(descriptor, closure); } @Nullable public List<DeclarationDescriptor> getClassOrConstructorClosure(@NotNull MemberDescriptor classOrConstructor) { if (classOrConstructor instanceof TypeAliasConstructorDescriptor) { ClassConstructorDescriptor constructorDescriptor = ((TypeAliasConstructorDescriptor) classOrConstructor).getUnderlyingConstructorDescriptor(); return getClassOrConstructorClosure(constructorDescriptor); } List<DeclarationDescriptor> result = staticContext.getClassOrConstructorClosure(classOrConstructor); if (result == null && classOrConstructor instanceof ConstructorDescriptor && ((ConstructorDescriptor) classOrConstructor).isPrimary() ) { result = staticContext.getClassOrConstructorClosure((ClassDescriptor) classOrConstructor.getContainingDeclaration()); } return result; } /** * Gets an expression to pass to a constructor of a closure function. I.e. 
consider the case: * * ``` * fun a(x) { * fun b(y) = x + y * return b * } * ``` * * Here, `x` is a free variable of `b`. Transform `a` into the following form: * * ``` * fun a(x) { * fun b0(x0) = { y -> x0 * y } * return b0(x) * } * ``` * * This function generates arguments passed to newly generated `b0` closure, as well as for the similar case of local class and * object expression. * * @param descriptor represents a free variable or, more generally, free declaration. * @return expression to pass to a closure constructor. */ @NotNull public JsExpression getArgumentForClosureConstructor(@NotNull DeclarationDescriptor descriptor) { JsExpression alias = getAliasForDescriptor(descriptor); if (alias != null) return alias; if (descriptor instanceof ReceiverParameterDescriptor) { return getDispatchReceiver((ReceiverParameterDescriptor) descriptor); } if (isCoroutineLambda(descriptor)) { return phpThisRef(); } return getNameForDescriptor(descriptor).makePHPVarRef(); } @Nullable public JsName getOuterClassReference(ClassDescriptor descriptor) { DeclarationDescriptor container = descriptor.getContainingDeclaration(); if (!(container instanceof ClassDescriptor) || !descriptor.isInner()) { return null; } return staticContext.getScopeForDescriptor(descriptor).declareName(Namer.OUTER_FIELD_NAME); } public void startDeclaration() { ClassDescriptor classDescriptor = this.classDescriptor; if (classDescriptor != null && !(classDescriptor.getContainingDeclaration() instanceof ClassOrPackageFragmentDescriptor)) { staticContext.getDeferredCallSites().put(classDescriptor, new ArrayList<DeferredCallSite>()); } } @NotNull public List<DeferredCallSite> endDeclaration() { List<DeferredCallSite> result = null; if (classDescriptor != null) { result = staticContext.getDeferredCallSites().remove(classDescriptor); } if (result == null) { result = Collections.emptyList(); } return result; } public boolean shouldBeDeferred(@NotNull ClassConstructorDescriptor constructor) { ClassDescriptor 
classDescriptor = constructor.getContainingDeclaration(); return staticContext.getDeferredCallSites().containsKey(classDescriptor); } public void deferConstructorCall(@NotNull ClassConstructorDescriptor constructor, @NotNull List<JsExpression> invocationArgs) { ClassDescriptor classDescriptor = constructor.getContainingDeclaration(); List<DeferredCallSite> callSites = staticContext.getDeferredCallSites().get(classDescriptor); if (callSites == null) throw new IllegalStateException("This method should be call only when `shouldBeDeferred` method " + "reports true for given constructor: " + constructor); callSites.add(new DeferredCallSite(constructor, invocationArgs, this)); } @Nullable public JsExpression getModuleExpressionFor(@NotNull DeclarationDescriptor descriptor) { return staticContext.getModuleExpressionFor(descriptor); } public void addDeclarationStatement(@NotNull JsStatement statement) { staticContext.getDeclarationStatements().add(statement); } public void addTopLevelStatement(@NotNull JsStatement statement) { staticContext.getTopLevelStatements().add(statement); } @NotNull public JsName createGlobalName(@NotNull String suggestedName) { return staticContext.getRootFunction().getScope().declareTemporaryName(suggestedName); } @NotNull public JsFunction createRootScopedFunction(@NotNull DeclarationDescriptor descriptor) { return createRootScopedFunction(descriptor.toString()); } @NotNull public JsFunction createRootScopedFunction(@NotNull String description) { return new JsFunction(staticContext.getRootFunction().getScope(), new JsBlock(), description); } public void addClass(@NotNull ClassDescriptor classDescriptor) { staticContext.addClass(classDescriptor); } public void export(@NotNull MemberDescriptor descriptor) { staticContext.export(descriptor, false); } public boolean isFromCurrentModule(@NotNull DeclarationDescriptor descriptor) { return staticContext.getCurrentModule() == DescriptorUtilsKt.getModule(descriptor); } public boolean 
isPublicInlineFunction() { DeclarationDescriptor descriptor = declarationDescriptor; while (descriptor instanceof FunctionDescriptor) { FunctionDescriptor function = (FunctionDescriptor) descriptor; if (function.isInline() && DescriptorUtilsKt.isEffectivelyPublicApi(function)) { return true; } descriptor = descriptor.getContainingDeclaration(); } return false; } @Nullable public VariableDescriptor getContinuationParameterDescriptor() { return continuationParameterDescriptor; } @NotNull public ModuleDescriptor getCurrentModule() { return staticContext.getCurrentModule(); } @Nullable public TranslationContext getParent() { return parent; } }
package org.sistemafinanciero.entity;
// Generated 02-may-2014 11:48:28 by Hibernate Tools 4.0.0

import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Date;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;

import org.sistemafinanciero.entity.type.Tipotransaccioncompraventa;

/**
 * Currency buy/sell transaction entity mapped to TRANSACCION_COMPRA_VENTA.
 *
 * <p>Uses JPA property (getter) access: all mapping annotations live on the
 * getters. The boolean {@code estado} flag is persisted as a numeric column
 * (1 = active, 0 = inactive), so it is stored internally as an {@code int}
 * and converted at the getter/setter boundary.
 *
 * <p>Originally generated by hbm2java (TransaccionCompraVenta).
 */
@Entity
@Table(name = "TRANSACCION_COMPRA_VENTA", schema = "C##BDSISTEMAFINANCIERO")
public class TransaccionCompraVenta implements java.io.Serializable {

    private static final long serialVersionUID = 1L;

    private BigInteger idTransaccionCompraVenta;
    private HistorialCaja historialCaja;
    private Moneda monedaRecibida;
    private Moneda monedaEntregada;
    private Date fecha;
    private Date hora;
    private BigInteger numeroOperacion;
    private BigDecimal montoRecibido;
    private BigDecimal montoEntregado;
    private BigDecimal tipoCambio;
    private String cliente;
    // Backing store for the boolean ESTADO column: 1 = true, 0 = false.
    private int estado;
    private String observacion;
    private Tipotransaccioncompraventa tipoTransaccion;

    /** No-arg constructor required by JPA/Hibernate. */
    public TransaccionCompraVenta() {
    }

    /**
     * Full constructor without the optional client reference and observation.
     *
     * @param estado transaction active flag; stored as 1/0 in the database
     */
    public TransaccionCompraVenta(BigInteger idTransaccionCompraVenta,
            HistorialCaja historialCaja, Moneda monedaRecibida, Moneda monedaEntregada,
            Date fecha, Date hora, BigInteger numeroOperacion, BigDecimal montoRecibido,
            BigDecimal montoEntregado, BigDecimal tipoCambio, boolean estado,
            Tipotransaccioncompraventa tipoTransaccion) {
        this.idTransaccionCompraVenta = idTransaccionCompraVenta;
        this.historialCaja = historialCaja;
        this.monedaRecibida = monedaRecibida;
        this.monedaEntregada = monedaEntregada;
        this.fecha = fecha;
        this.hora = hora;
        this.numeroOperacion = numeroOperacion;
        this.montoRecibido = montoRecibido;
        this.montoEntregado = montoEntregado;
        this.tipoCambio = tipoCambio;
        this.estado = estado ? 1 : 0;
        this.tipoTransaccion = tipoTransaccion;
    }

    /**
     * Full constructor including the client reference ({@code referencia},
     * mapped to the CLIENTE column) and the free-form observation.
     */
    public TransaccionCompraVenta(BigInteger idTransaccionCompraVenta,
            HistorialCaja historialCaja, Moneda monedaRecibida, Moneda monedaEntregada,
            Date fecha, Date hora, BigInteger numeroOperacion, BigDecimal montoRecibido,
            BigDecimal montoEntregado, BigDecimal tipoCambio, String referencia,
            boolean estado, String observacion, Tipotransaccioncompraventa tipoTransaccion) {
        this.idTransaccionCompraVenta = idTransaccionCompraVenta;
        this.historialCaja = historialCaja;
        this.monedaRecibida = monedaRecibida;
        this.monedaEntregada = monedaEntregada;
        this.fecha = fecha;
        this.hora = hora;
        this.numeroOperacion = numeroOperacion;
        this.montoRecibido = montoRecibido;
        this.montoEntregado = montoEntregado;
        this.tipoCambio = tipoCambio;
        this.setCliente(referencia);
        this.estado = estado ? 1 : 0;
        this.observacion = observacion;
        this.tipoTransaccion = tipoTransaccion;
    }

    /** Primary key, assigned from the TRANSACCION_SEQUENCE database sequence. */
    @GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "secuencia_transaccion_cv")
    @SequenceGenerator(name = "secuencia_transaccion_cv", initialValue = 1, allocationSize = 1, sequenceName = "TRANSACCION_SEQUENCE")
    @Id
    @Column(name = "ID_TRANSACCION_COMPRA_VENTA", unique = true, nullable = false, precision = 22, scale = 0)
    public BigInteger getIdTransaccionCompraVenta() {
        return this.idTransaccionCompraVenta;
    }

    public void setIdTransaccionCompraVenta(BigInteger idTransaccionCompraVenta) {
        this.idTransaccionCompraVenta = idTransaccionCompraVenta;
    }

    /** Cash-register session this transaction belongs to. */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "ID_HISTORIAL_CAJA", nullable = false)
    public HistorialCaja getHistorialCaja() {
        return this.historialCaja;
    }

    public void setHistorialCaja(HistorialCaja historialCaja) {
        this.historialCaja = historialCaja;
    }

    /** Currency received from the client. */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "ID_MONEDA_RECIBIDO", nullable = false)
    public Moneda getMonedaRecibida() {
        return this.monedaRecibida;
    }

    public void setMonedaRecibida(Moneda monedaRecibida) {
        this.monedaRecibida = monedaRecibida;
    }

    /** Currency handed over to the client. */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "ID_MONEDA_ENTREGADO", nullable = false)
    public Moneda getMonedaEntregada() {
        return this.monedaEntregada;
    }

    public void setMonedaEntregada(Moneda monedaEntregada) {
        this.monedaEntregada = monedaEntregada;
    }

    /** Transaction date (date part only). */
    @Temporal(TemporalType.DATE)
    @Column(name = "FECHA", nullable = false, length = 7)
    public Date getFecha() {
        return this.fecha;
    }

    public void setFecha(Date fecha) {
        this.fecha = fecha;
    }

    /** Transaction time-of-day. */
    @Column(name = "HORA", nullable = false)
    public Date getHora() {
        return this.hora;
    }

    public void setHora(Date hora) {
        this.hora = hora;
    }

    @Column(name = "NUMERO_OPERACION", nullable = false, precision = 22, scale = 0)
    public BigInteger getNumeroOperacion() {
        return this.numeroOperacion;
    }

    public void setNumeroOperacion(BigInteger numeroOperacion) {
        this.numeroOperacion = numeroOperacion;
    }

    @Column(name = "MONTO_RECIBIDO", nullable = false, precision = 18)
    public BigDecimal getMontoRecibido() {
        return this.montoRecibido;
    }

    public void setMontoRecibido(BigDecimal montoRecibido) {
        this.montoRecibido = montoRecibido;
    }

    @Column(name = "MONTO_ENTREGADO", nullable = false, precision = 18)
    public BigDecimal getMontoEntregado() {
        return this.montoEntregado;
    }

    public void setMontoEntregado(BigDecimal montoEntregado) {
        this.montoEntregado = montoEntregado;
    }

    /** Exchange rate applied to this operation. */
    @Column(name = "TIPO_CAMBIO", nullable = false, precision = 5, scale = 3)
    public BigDecimal getTipoCambio() {
        return this.tipoCambio;
    }

    public void setTipoCambio(BigDecimal tipoCambio) {
        this.tipoCambio = tipoCambio;
    }

    /** Active flag, persisted as a numeric 1/0 column. */
    @Column(name = "ESTADO", nullable = false, precision = 22, scale = 0)
    public boolean getEstado() {
        return this.estado == 1;
    }

    public void setEstado(boolean estado) {
        this.estado = estado ? 1 : 0;
    }

    @Column(name = "OBSERVACION", length = 100, columnDefinition = "nvarchar2")
    public String getObservacion() {
        return this.observacion;
    }

    public void setObservacion(String observacion) {
        this.observacion = observacion;
    }

    /** Whether this operation is a buy or a sell, stored by enum name. */
    @Enumerated(value = EnumType.STRING)
    @Column(name = "TIPO_TRANSACCION", nullable = false, length = 12, columnDefinition = "nvarchar2")
    public Tipotransaccioncompraventa getTipoTransaccion() {
        return this.tipoTransaccion;
    }

    public void setTipoTransaccion(Tipotransaccioncompraventa tipoTransaccion) {
        this.tipoTransaccion = tipoTransaccion;
    }

    /** Client name/reference for the operation; optional. */
    @Column(name = "CLIENTE", length = 140, columnDefinition = "nvarchar2")
    public String getCliente() {
        return cliente;
    }

    public void setCliente(String cliente) {
        this.cliente = cliente;
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.java.psi.search;

import com.intellij.JavaTestUtil;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.impl.ProjectUtil;
import com.intellij.ide.todo.TodoConfiguration;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.projectRoots.impl.ProjectRootUtil;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.roots.ex.ProjectRootManagerEx;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.impl.JavaPsiFacadeEx;
import com.intellij.psi.impl.PsiManagerImpl;
import com.intellij.psi.impl.cache.impl.todo.TodoIndex;
import com.intellij.psi.impl.cache.impl.todo.TodoIndexEntry;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl;
import com.intellij.psi.search.*;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.testFramework.PlatformTestCase;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.PsiTestCase;
import com.intellij.testFramework.PsiTestUtil;
import com.intellij.util.ArrayUtil;
import com.intellij.util.indexing.FileBasedIndex;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Verifies that IntelliJ's usage/todo caches stay correct when files, directories,
 * roots and todo settings change both inside and outside the IDE's control.
 *
 * <p>Each test mutates the project structure or file system, then asserts which
 * files the reference-search and todo caches report via {@link #checkUsages} and
 * {@link #checkTodos}. Test data lives under {@code psi/search/updateCache}.
 */
@PlatformTestCase.WrapInCommand
public class UpdateCacheTest extends PsiTestCase {
  @Override
  protected void setUpProject() throws Exception {
    // Custom project setup: load a real project from an .ipr file rather than
    // the default in-memory project, so cache persistence is exercised.
    myProjectManager = ProjectManagerEx.getInstanceEx();
    LOG.assertTrue(myProjectManager != null, "Cannot instantiate ProjectManager component");

    File projectFile = getIprFile();
    loadAndSetupProject(projectFile.getPath());
  }

  // Opens the project at `path`, attaches the test-data content root and a JDK.
  private void loadAndSetupProject(String path) throws Exception {
    LocalFileSystem.getInstance().refreshIoFiles(myFilesToDelete);

    myProject = ProjectManager.getInstance().loadAndOpenProject(path);

    setUpModule();

    final String root = JavaTestUtil.getJavaTestDataPath() + "/psi/search/updateCache";
    PsiTestUtil.createTestProjectStructure(myProject, myModule, root, myFilesToDelete);

    setUpJdk();

    myProjectManager.openTestProject(myProject);
    runStartupActivities();
  }

  @Override
  protected void tearDown() throws Exception {
    // Close the project we opened manually before the base class cleans up.
    ProjectManager.getInstance().closeProject(myProject);
    super.tearDown();
  }

  // A file created through the PSI should immediately show up in usage results.
  public void testFileCreation() {
    PsiDirectory root = ProjectRootUtil.getAllContentRoots(myProject)[0];
    PsiFile file = PsiFileFactory.getInstance(myProject)
        .createFileFromText("New.java", JavaFileType.INSTANCE, "class A{ Object o;}");

    final PsiFile finalFile = file;
    file = new WriteAction<PsiFile>() {
      @Override
      protected void run(@NotNull Result<PsiFile> result) {
        PsiFile res = (PsiFile)root.add(finalFile);
        result.setResult(res);
      }
    }.execute().throwException().getResultObject();
    assertNotNull(file);

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // A file created on disk (outside the IDE) must be picked up after a VFS refresh.
  public void testExternalFileCreation() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    String newFilePath = root.getPresentableUrl() + File.separatorChar + "New.java";
    FileUtil.writeToFile(new File(newFilePath), "class A{ Object o;}".getBytes(CharsetToolkit.UTF8_CHARSET));
    VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(newFilePath.replace(File.separatorChar, '/'));
    assertNotNull(file);

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // Deleting a file must drop its usages from the cache.
  public void testExternalFileDeletion() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("1.java");
    assertNotNull(file);
    delete(file);

    PsiClass stringClass = myJavaFacade.findClass("java.lang.String", GlobalSearchScope.allScope(getProject()));
    assertNotNull(stringClass);
    checkUsages(stringClass, ArrayUtil.EMPTY_STRING_ARRAY);
  }

  // Rewriting a file's text must re-index it.
  public void testExternalFileModification() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("1.java");
    assertNotNull(file);
    setFileText(file, "class A{ Object o;}");
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"1.java"});
  }

  // A file modified while the project is closed must be re-indexed on reopen.
  // This test fully closes, disposes and reloads the project.
  public void testExternalFileModificationWhileProjectClosed() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{});
    FileBasedIndex.getInstance().getContainingFiles(TodoIndex.NAME, new TodoIndexEntry("todo", true), GlobalSearchScope.allScope(getProject()));

    final String projectLocation = myProject.getPresentableUrl();
    assert projectLocation != null : myProject;
    PlatformTestUtil.saveProject(myProject);
    final VirtualFile content = ModuleRootManager.getInstance(getModule()).getContentRoots()[0];
    Project project = myProject;
    ProjectUtil.closeAndDispose(project);
    myProject = null;
    InjectedLanguageManagerImpl.checkInjectorsAreDisposed(project);
    assertTrue("Project was not disposed", project.isDisposed());
    myModule = null;

    // Modify the file on disk while no project is open.
    final File file = new File(root.getPath(), "1.java");
    assertTrue(file.exists());
    FileUtil.writeToFile(file, "class A{ Object o;}".getBytes(CharsetToolkit.UTF8_CHARSET));
    root.refresh(false, true);

    LocalFileSystem.getInstance().refresh(false);

    // Reload the project and re-wire the test fixture's cached managers.
    myProject = ProjectManager.getInstance().loadAndOpenProject(projectLocation);
    InjectedLanguageManagerImpl.pushInjectors(getProject());

    setUpModule();
    setUpJdk();
    ProjectManagerEx.getInstanceEx().openTestProject(myProject);
    UIUtil.dispatchAllInvocationEvents(); // startup activities
    runStartupActivities();
    PsiTestUtil.addSourceContentToRoots(getModule(), content);

    assertNotNull(myProject);
    myPsiManager = (PsiManagerImpl) PsiManager.getInstance(myProject);
    myJavaFacade = JavaPsiFacadeEx.getInstanceEx(myProject);

    objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"1.java"});
  }

  // A new directory plus file created externally must be indexed after refresh.
  public void testExternalDirCreation() throws Exception {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    String newFilePath = root.getPresentableUrl() + File.separatorChar + "dir" + File.separatorChar + "New.java";
    LOG.assertTrue(new File(newFilePath).getParentFile().mkdir());
    FileUtil.writeToFile(new File(newFilePath), "class A{ Object o;}".getBytes(CharsetToolkit.UTF8_CHARSET));
    VirtualFile file = LocalFileSystem.getInstance().refreshAndFindFileByPath(newFilePath.replace(File.separatorChar, '/'));
    assertNotNull(file);

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiClass objectClass = myJavaFacade.findClass(CommonClassNames.JAVA_LANG_OBJECT, GlobalSearchScope.allScope(getProject()));
    assertNotNull(objectClass);
    checkUsages(objectClass, new String[]{"New.java"});
  }

  // Deleting a whole directory must drop usages from files inside it.
  public void testExternalDirDeletion() {
    VirtualFile root = ProjectRootManager.getInstance(myProject).getContentRoots()[0];

    VirtualFile file = root.findChild("aDir");
    assertNotNull(file);
    delete(file);

    PsiClass threadClass = myJavaFacade.findClass("java.lang.Thread", GlobalSearchScope.allScope(getProject()));
    assertNotNull(threadClass);
    checkUsages(threadClass, ArrayUtil.EMPTY_STRING_ARRAY);
  }

  // Changing the todo patterns must re-evaluate which files contain todos;
  // restoring the old patterns must restore the old result.
  public void testTodoConfigurationChange() {
    TodoPattern pattern = new TodoPattern("newtodo", TodoAttributesUtil.createDefault(), true);
    TodoPattern[] oldPatterns = TodoConfiguration.getInstance().getTodoPatterns();

    checkTodos(new String[]{"2.java"});

    TodoConfiguration.getInstance().setTodoPatterns(new TodoPattern[]{pattern});
    try{
      checkTodos(new String[]{"1.java"});
    }
    finally{
      TodoConfiguration.getInstance().setTodoPatterns(oldPatterns);
      checkTodos(new String[]{"2.java"});
    }
  }

  // Files under a newly excluded root must disappear from usages and todos.
  public void testAddExcludeRoot() {
    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];

    final VirtualFile dir = root.findChild("aDir");

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        VirtualFile newFile = createChildData(dir, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTestUtil.addExcludedRoot(myModule, dir);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception",GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java"});
    checkTodos(new String[]{});
  }

  // Files under a formerly excluded root must reappear once exclusion is removed.
  public void testRemoveExcludeRoot() {
    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];

    final VirtualFile dir = root.findChild("aDir");
    PsiTestUtil.addExcludedRoot(myModule, dir);

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        VirtualFile newFile = createChildData(dir, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches

    PsiTestUtil.removeExcludedRoot(myModule, dir);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // A newly registered source root's files must enter the caches.
  public void testAddSourceRoot() throws Exception{
    File dir = createTempDirectory();

    final VirtualFile root = LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        PsiTestUtil.addContentRoot(myModule, root);

        VirtualFile newFile = createChildData(root, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    PsiTestUtil.addSourceRoot(myModule, root);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Removing a source root keeps todos (the file is still in content) but
  // usage checking is disabled below — see inline note.
  public void testRemoveSourceRoot() {
    final VirtualFile root = ModuleRootManager.getInstance(myModule).getContentRoots()[0];

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        VirtualFile newFile = createChildData(root, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches

    VirtualFile[] sourceRoots = ModuleRootManager.getInstance(myModule).getSourceRoots();
    LOG.assertTrue(sourceRoots.length == 1);
    PsiTestUtil.removeSourceRoot(myModule, sourceRoots[0]);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    // currently it actually finds usages by FQN due to Java PSI enabled for out-of-source java files
    // so the following check is disabled
    //checkUsages(exceptionClass, new String[]{});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Adding a project (source) root should make its files searchable.
  public void testAddProjectRoot() throws Exception{
    File dir = createTempDirectory();

    final VirtualFile root = LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        PsiTestUtil.addSourceRoot(myModule, root);

        VirtualFile newFile = createChildData(root, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiSearchHelper.SERVICE.getInstance(myProject).processAllFilesWithWord("aaa", GlobalSearchScope.allScope(myProject), psiFile -> true, true); // to initialize caches

/*
    rootManager.startChange();
    rootManager.addRoot(root, ProjectRootType.PROJECT);
    rootManager.finishChange();
*/

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{"1.java", "2.java", "New.java"});
    checkTodos(new String[]{"2.java", "New.java"});
  }

  // Regression test: excluding the whole content root empties both caches.
  public void testSCR6066() {
    ProjectRootManagerEx rootManager = (ProjectRootManagerEx)ProjectRootManager.getInstance(myProject);
    final VirtualFile root = rootManager.getContentRoots()[0];

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to initialize caches

    new WriteCommandAction.Simple(getProject()) {
      @Override
      protected void run() {
        VirtualFile newFile = createChildData(root, "New.java");
        setFileText(newFile, "class A{ Exception e;} //todo");
      }
    }.execute().throwException();

    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    PsiTodoSearchHelper.SERVICE.getInstance(myProject).findFilesWithTodoItems(); // to update caches

    PsiTestUtil.addExcludedRoot(myModule, root);

    PsiClass exceptionClass = myJavaFacade.findClass("java.lang.Exception", GlobalSearchScope.allScope(getProject()));
    assertNotNull(exceptionClass);
    checkUsages(exceptionClass, new String[]{});
    checkTodos(new String[]{});
  }

  // Asserts that references to `element` occur in exactly `expectedFiles`
  // (compared by file name, order-insensitive).
  private void checkUsages(PsiElement element, @NonNls String[] expectedFiles){
    PsiReference[] refs = ReferencesSearch.search(element, GlobalSearchScope.projectScope(myProject), false).toArray(
      PsiReference.EMPTY_ARRAY);

    List<PsiFile> files = new ArrayList<>();
    for (PsiReference ref : refs) {
      PsiFile file = ref.getElement().getContainingFile();
      if (!files.contains(file)) {
        files.add(file);
      }
    }

    assertEquals(expectedFiles.length, files.size());

    Collections.sort(files, (file1, file2) -> file1.getName().compareTo(file2.getName()));
    Arrays.sort(expectedFiles);

    for(int i = 0; i < expectedFiles.length; i++){
      String name = expectedFiles[i];
      PsiFile file = files.get(i);
      assertEquals(name, file.getName());
    }
  }

  // Asserts that the todo index reports exactly `expectedFiles`
  // (compared by file name, order-insensitive).
  private void checkTodos(@NonNls String[] expectedFiles){
    PsiTodoSearchHelper helper = PsiTodoSearchHelper.SERVICE.getInstance(myProject);

    PsiFile[] files = helper.findFilesWithTodoItems();

    assertEquals(expectedFiles.length, files.length);

    Arrays.sort(files, (file1, file2) -> file1.getName().compareTo(file2.getName()));
    Arrays.sort(expectedFiles);

    for(int i = 0; i < expectedFiles.length; i++){
      String name = expectedFiles[i];
      PsiFile file = files[i];
      assertEquals(name, file.getName());
    }
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import io.hops.exception.StorageException;
import io.hops.exception.TransactionContextException;
import io.hops.leader_election.node.SortedActiveNodeListPBImpl;
import io.hops.metadata.HdfsStorageFactory;
import io.hops.security.Users;
import io.hops.transaction.handler.HDFSOperationType;
import io.hops.transaction.handler.HopsTransactionalRequestHandler;
import io.hops.transaction.lock.LockFactory;
import io.hops.transaction.lock.TransactionLockTypes;
import io.hops.transaction.lock.TransactionLocks;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.protocol.FSLimitException;
import org.apache.hadoop.hdfs.protocol.FSLimitException.MaxDirectoryItemsExceededException;
import org.apache.hadoop.hdfs.protocol.FSLimitException.PathComponentTooLongException;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.Collections;

import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests the namenode's filesystem limits: maximum path-component length and
 * maximum number of items per directory, as enforced by
 * {@code FSDirectory.verifyFsLimits}.
 *
 * <p>Each test configures the limit keys, then adds children of increasing
 * name length under the root inode via {@link #addChildWithName}, asserting
 * which {@link QuotaExceededException} subclass (if any) is raised. State is
 * held in static fields because the nested {@code TestFSDirectory} and the
 * transactional handler (both created lazily) need to see the values set by
 * the currently running test.
 */
public class TestFsLimits {
  static Configuration conf;
  // Path components handed to verifyFsLimits: [root, <slot for the child>].
  static INode[] inodes;
  static FSDirectory fs;
  // Whether the FSDirectory reports "ready"; edit-log replay runs with false,
  // which disables limit checking (see testDuringEditLogs).
  static boolean fsIsReady;
  static PermissionStatus perms =
      new PermissionStatus("admin", "admin", FsPermission.getDefault());
  static INodeDirectoryWithQuota rootInode;

  // Builds a mock FSNamesystem whose NameNode reports an empty active-node list.
  static private FSNamesystem getMockNamesystem() {
    FSNamesystem fsn = mock(FSNamesystem.class);
    when(fsn.createFsOwnerPermissions((FsPermission) anyObject())).thenReturn(
        new PermissionStatus("root", "wheel", FsPermission.getDefault()));
    NameNode nn = mock(NameNode.class);
    when(nn.getActiveNameNodes())
        .thenReturn(new SortedActiveNodeListPBImpl(Collections.EMPTY_LIST));
    when(fsn.getNameNode()).thenReturn(nn);
    return fsn;
  }

  // Formats backing storage and resets all static test state for one test run.
  private void initFS() throws StorageException, IOException {
    HdfsStorageFactory.setConfiguration(conf);
    assert (HdfsStorageFactory.formatStorage());
    Users.addUserToGroup(perms.getUserName(), perms.getGroupName());
    rootInode = FSDirectory.createRootInode(perms, true);
    inodes = new INode[]{rootInode, null};
    fs = null;
    fsIsReady = true;
  }

  /**
   * FSDirectory subclass that exposes {@code verifyFsLimits} to the test and
   * honours the test-controlled {@code fsIsReady} flag.
   */
  private static class TestFSDirectory extends FSDirectory {
    public TestFSDirectory() throws IOException {
      super(getMockNamesystem(), conf);
      setReady(fsIsReady);
    }

    @Override
    public <T extends INode> void verifyFsLimits(INode[] pathComponents, int pos, T child)
        throws FSLimitException, StorageException, TransactionContextException {
      super.verifyFsLimits(pathComponents, pos, child);
    }
  }

  @Before
  public void setUp() throws IOException {
    conf = new Configuration();
    initFS();
  }

  // Default component-length limit is 0 (unlimited).
  @Test
  public void testDefaultMaxComponentLength() {
    int maxComponentLength = conf.getInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY,
        DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_DEFAULT);
    assertEquals(0, maxComponentLength);
  }

  // Default directory-items limit is 0 (unlimited).
  @Test
  public void testDefaultMaxDirItems() {
    int maxDirItems = conf.getInt(DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY,
        DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_DEFAULT);
    assertEquals(0, maxDirItems);
  }

  // With no limits configured, any number of children of any length succeed.
  @Test
  public void testNoLimits() throws Exception {
    addChildWithName("1", null);
    addChildWithName("22", null);
    addChildWithName("333", null);
    addChildWithName("4444", null);
    addChildWithName("55555", null);
  }

  // Names longer than the configured component length are rejected.
  @Test
  public void testMaxComponentLength() throws Exception {
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 2);
    addChildWithName("1", null);
    addChildWithName("22", null);
    addChildWithName("333", PathComponentTooLongException.class);
    addChildWithName("4444", PathComponentTooLongException.class);
  }

  // Adding more children than the configured per-directory limit is rejected.
  @Test
  public void testMaxDirItems() throws Exception {
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY, 2);
    addChildWithName("1", null);
    addChildWithName("22", null);
    addChildWithName("333", MaxDirectoryItemsExceededException.class);
    addChildWithName("4444", MaxDirectoryItemsExceededException.class);
  }

  // When both limits are set, the first violated limit determines the exception.
  @Test
  public void testMaxComponentsAndMaxDirItems() throws Exception {
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 3);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY, 2);
    addChildWithName("1", null);
    addChildWithName("22", null);
    addChildWithName("333", MaxDirectoryItemsExceededException.class);
    addChildWithName("4444", PathComponentTooLongException.class);
  }

  // While the FSDirectory is not "ready" (edit-log replay), limits are not enforced.
  @Test
  public void testDuringEditLogs() throws Exception {
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 3);
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_DIRECTORY_ITEMS_KEY, 2);
    fsIsReady = false;
    addChildWithName("1", null);
    addChildWithName("22", null);
    addChildWithName("333", null);
    addChildWithName("4444", null);
  }

  // Monotonically increasing inode id for children created across tests.
  private static int id = 1 + INodeDirectory.ROOT_ID;

  /**
   * Adds a child named {@code name} under the root inside a Hops transaction
   * and asserts that limit verification raises exactly {@code expected}
   * (or nothing if {@code expected} is null).
   */
  private void addChildWithName(final String name, final Class<?> expected) throws Exception {
    HopsTransactionalRequestHandler handler =
        new HopsTransactionalRequestHandler(HDFSOperationType.TEST) {
          @Override
          public void acquireLock(TransactionLocks locks) throws IOException {
            LockFactory lf = LockFactory.getInstance();
            locks.add(lf.getINodeLock(getMockNamesystem().getNameNode(),
                TransactionLockTypes.INodeLockType.WRITE_ON_TARGET_AND_PARENT,
                TransactionLockTypes.INodeResolveType.PATH_AND_IMMEDIATE_CHILDREN,
                "/", "/" + name));
          }

          @Override
          public Object performTask() throws StorageException, IOException {
            // have to create after the caller has had a chance to set conf values
            if (fs == null) {
              fs = new TestFSDirectory();
            }
            INode child = new INodeDirectory(name, perms);
            child.setIdNoPersistance(id++);
            child.setLocalName(name);
            child.setPartitionIdNoPersistance(INodeDirectory.ROOT_ID);
            Class<?> generated = null;
            try {
              fs.verifyFsLimits(inodes, 1, child);
              INodeDirectoryWithQuota.getRootDir().addChild(child, false);
            } catch (QuotaExceededException e) {
              generated = e.getClass();
            }
            assertEquals(expected, generated);
            return null;
          }
        };
    handler.handle();
  }
}
/*
 * Copyright 2014 Red Hat, Inc.
 *
 * Red Hat licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package io.vertx.blueprint.microservice.payment;

import io.vertx.blueprint.microservice.payment.PaymentQueryService;
import io.vertx.core.Vertx;
import io.vertx.core.Handler;
import io.vertx.core.AsyncResult;
import io.vertx.core.eventbus.EventBus;
import io.vertx.core.eventbus.Message;
import io.vertx.core.eventbus.MessageConsumer;
import io.vertx.core.eventbus.DeliveryOptions;
import io.vertx.core.eventbus.ReplyException;
import io.vertx.core.json.JsonObject;
import io.vertx.core.json.JsonArray;
import java.util.Collection;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import io.vertx.serviceproxy.ProxyHelper;
import io.vertx.serviceproxy.ProxyHandler;
import io.vertx.serviceproxy.ServiceException;
import io.vertx.serviceproxy.ServiceExceptionMessageCodec;
import io.vertx.blueprint.microservice.payment.Payment;
import io.vertx.core.AsyncResult;
import io.vertx.core.Handler;

/*
 * Generated Proxy code - DO NOT EDIT
 * @author Roger the Robot
 *
 * Event-bus side of the PaymentQueryService proxy: receives JSON messages,
 * decodes the "action" header, invokes the local service implementation and
 * replies with the (JSON-encoded) result or a ServiceException failure.
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public class PaymentQueryServiceVertxProxyHandler extends ProxyHandler {

  // Idle-connection timeout used when the two-arg constructor is called.
  public static final long DEFAULT_CONNECTION_TIMEOUT = 5 * 60; // 5 minutes

  private final Vertx vertx;
  private final PaymentQueryService service;
  // Periodic-timer id used for idle checking, or -1 when no timer was installed.
  private final long timerID;
  // Last access time in nanoseconds (System.nanoTime), updated on every handled message.
  private long lastAccessed;
  private final long timeoutSeconds;

  /** Creates a handler with the default 5-minute idle timeout. */
  public PaymentQueryServiceVertxProxyHandler(Vertx vertx, PaymentQueryService service) {
    this(vertx, service, DEFAULT_CONNECTION_TIMEOUT);
  }

  /** Creates a top-level handler (no idle timer) with the given timeout value. */
  public PaymentQueryServiceVertxProxyHandler(Vertx vertx, PaymentQueryService service, long timeoutInSecond) {
    this(vertx, service, true, timeoutInSecond);
  }

  /**
   * Full constructor.
   *
   * @param vertx          the Vert.x instance whose event bus this handler serves
   * @param service        the local service implementation that actions are dispatched to
   * @param topLevel       when false (and timeoutSeconds != -1) a periodic idle check
   *                       is installed that closes this handler after inactivity
   * @param timeoutSeconds idle timeout in seconds; -1 disables the idle check
   */
  public PaymentQueryServiceVertxProxyHandler(Vertx vertx, PaymentQueryService service, boolean topLevel, long timeoutSeconds) {
    this.vertx = vertx;
    this.service = service;
    this.timeoutSeconds = timeoutSeconds;
    try {
      // The codec may already be registered by another proxy handler on this
      // event bus; registering twice throws IllegalStateException, which is
      // deliberately ignored here.
      this.vertx.eventBus().registerDefaultCodec(ServiceException.class,
          new ServiceExceptionMessageCodec());
    } catch (IllegalStateException ex) {}
    if (timeoutSeconds != -1 && !topLevel) {
      // Check at half the timeout period, capped at 10s, so the handler is
      // closed reasonably soon after the timeout elapses.
      long period = timeoutSeconds * 1000 / 2;
      if (period > 10000) {
        period = 10000;
      }
      this.timerID = vertx.setPeriodic(period, this::checkTimedOut);
    } else {
      this.timerID = -1;
    }
    accessed();
  }

  /** Registers this handler as a JsonObject consumer on the given event-bus address. */
  public MessageConsumer<JsonObject> registerHandler(String address) {
    MessageConsumer<JsonObject> consumer = vertx.eventBus().<JsonObject>consumer(address).handler(this);
    this.setConsumer(consumer);
    return consumer;
  }

  // Periodic-timer callback: close this handler once it has been idle longer
  // than timeoutSeconds (comparison done in nanoseconds).
  private void checkTimedOut(long id) {
    long now = System.nanoTime();
    if (now - lastAccessed > timeoutSeconds * 1000000000) {
      close();
    }
  }

  /** Cancels the idle timer (if any) and unregisters the consumer. */
  @Override
  public void close() {
    if (timerID != -1) {
      vertx.cancelTimer(timerID);
    }
    super.close();
  }

  // Records the current time as the last access, for the idle-timeout check.
  private void accessed() {
    this.lastAccessed = System.nanoTime();
  }

  /**
   * Dispatches an incoming event-bus message to the service implementation
   * based on the "action" header. Failures are replied as ServiceException;
   * unexpected throwables are additionally rethrown after replying 500.
   */
  public void handle(Message<JsonObject> msg) {
    try {
      JsonObject json = msg.body();
      String action = msg.headers().get("action");
      if (action == null) {
        throw new IllegalStateException("action not specified");
      }
      accessed();
      switch (action) {
        case "initializePersistence": {
          service.initializePersistence(createHandler(msg));
          break;
        }
        case "addPaymentRecord": {
          // Rebuild the Payment data object from its JSON form; null stays null.
          service.addPaymentRecord(json.getJsonObject("payment") == null ? null : new io.vertx.blueprint.microservice.payment.Payment(json.getJsonObject("payment")), createHandler(msg));
          break;
        }
        case "retrievePaymentRecord": {
          service.retrievePaymentRecord((java.lang.String)json.getValue("payId"), res -> {
            if (res.failed()) {
              if (res.cause() instanceof ServiceException) {
                msg.reply(res.cause());
              } else {
                msg.reply(new ServiceException(-1, res.cause().getMessage()));
              }
            } else {
              // Reply with the JSON representation of the Payment, or null if absent.
              msg.reply(res.result() == null ? null : res.result().toJson());
            }
          });
          break;
        }
        default: {
          throw new IllegalStateException("Invalid action: " + action);
        }
      }
    } catch (Throwable t) {
      msg.reply(new ServiceException(500, t.getMessage()));
      throw t;
    }
  }

  // Generic reply handler: failures become ServiceException replies; enum
  // results are replied by name, everything else is replied as-is.
  private <T> Handler<AsyncResult<T>> createHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        if (res.result() != null && res.result().getClass().isEnum()) {
          msg.reply(((Enum) res.result()).name());
        } else {
          msg.reply(res.result());
        }
      }
    };
  }

  // Reply handler for List results, encoded as a JsonArray.
  private <T> Handler<AsyncResult<List<T>>> createListHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        msg.reply(new JsonArray(res.result()));
      }
    };
  }

  // Reply handler for Set results, encoded as a JsonArray (order unspecified).
  private <T> Handler<AsyncResult<Set<T>>> createSetHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        msg.reply(new JsonArray(new ArrayList<>(res.result())));
      }
    };
  }

  // Reply handler for List<Character>: JSON has no char type, so each
  // character is transferred as its int code point value.
  private Handler<AsyncResult<List<Character>>> createListCharHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        JsonArray arr = new JsonArray();
        for (Character chr: res.result()) {
          arr.add((int) chr);
        }
        msg.reply(arr);
      }
    };
  }

  // Reply handler for Set<Character>: same int encoding as the List variant.
  private Handler<AsyncResult<Set<Character>>> createSetCharHandler(Message msg) {
    return res -> {
      if (res.failed()) {
        if (res.cause() instanceof ServiceException) {
          msg.reply(res.cause());
        } else {
          msg.reply(new ServiceException(-1, res.cause().getMessage()));
        }
      } else {
        JsonArray arr = new JsonArray();
        for (Character chr: res.result()) {
          arr.add((int) chr);
        }
        msg.reply(arr);
      }
    };
  }

  // Unchecked-cast helpers used by generated decode code.
  private <T> Map<String, T> convertMap(Map map) {
    return (Map<String, T>)map;
  }

  private <T> List<T> convertList(List list) {
    return (List<T>)list;
  }

  private <T> Set<T> convertSet(List list) {
    return new HashSet<T>((List<T>)list);
  }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.testFramework;

import com.intellij.codeInsight.generation.CommentByLineCommentHandler;
import com.intellij.ide.DataManager;
import com.intellij.injected.editor.DocumentWindow;
import com.intellij.injected.editor.EditorWindow;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import consulo.util.dataholder.Key;
import consulo.testFramework.util.TestPathUtil;
import com.intellij.openapi.command.CommandProcessor;
import consulo.logging.Logger;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.actionSystem.EditorActionHandler;
import com.intellij.openapi.editor.actionSystem.EditorActionManager;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.editor.impl.DocumentImpl;
import com.intellij.openapi.editor.impl.DesktopEditorImpl;
import com.intellij.openapi.editor.impl.TrailingSpacesStripper;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.fileTypes.InternalStdFileTypes;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingProjectManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.util.FileComparisonFailure;
import org.jetbrains.annotations.NonNls;

import javax.annotation.Nonnull;
import java.io.File;
import java.io.IOException;

/**
 * Base class for "light" (single shared project) code-insight tests.
 *
 * <p>Subclasses load a test-data file annotated with {@code <caret>} and
 * {@code <selection>}/{@code </selection>} markers into a real editor,
 * perform editor actions, and then compare the resulting editor content,
 * caret position and selection against an expected file or text.
 *
 * <p>NOTE(review): state is kept in static fields ({@link #myEditor},
 * {@link #myFile}, {@link #myVFile}), so tests built on this class are not
 * safe to run concurrently within one JVM — presumably intentional for the
 * shared light-project model; confirm before parallelizing.
 */
public abstract class LightPlatformCodeInsightTestCase extends LightPlatformTestCase {
  private static final Logger LOG = Logger.getInstance(LightPlatformCodeInsightTestCase.class);

  // Currently open editor / PSI file / virtual file for the running test.
  // Populated by configureFromFileText() and cleared in tearDown().
  protected static Editor myEditor;
  protected static PsiFile myFile;
  protected static VirtualFile myVFile;

  // Markers recognized in test-data files; they are stripped from the text
  // and converted to caret position / selection range.
  private static final String CARET_MARKER = "<caret>";
  @NonNls private static final String SELECTION_START_MARKER = "<selection>";
  @NonNls private static final String SELECTION_END_MARKER = "</selection>";

  /**
   * Runs the test body inside a command (for undo grouping) and, by default,
   * inside a write action; any throwable is captured and rethrown afterwards
   * because the runnables cannot propagate checked exceptions directly.
   */
  @Override
  protected void runTest() throws Throwable {
    final Throwable[] throwable = {null};

    Runnable action = new Runnable() {
      @Override
      public void run() {
        CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
          @Override
          public void run() {
            try {
              doRunTest();
            }
            catch (Throwable t) {
              throwable[0] = t;
            }
          }
        }, "", null);
      }
    };

    if (isRunInWriteAction()) {
      ApplicationManager.getApplication().runWriteAction(action);
    }
    else {
      action.run();
    }
    if (throwable[0] != null) {
      throw throwable[0];
    }
  }

  // Invokes the superclass test runner; extracted so runTest() can wrap it
  // in command/write-action context above.
  protected void doRunTest() throws Throwable {
    LightPlatformCodeInsightTestCase.super.runTest();
  }

  // Override and return false to run the test body outside a write action.
  protected boolean isRunInWriteAction() {
    return true;
  }

  /**
   * Configure test from data file. Data file is usual java, xml or whatever file that needs to be tested except it
   * has &lt;caret&gt; marker where caret should be placed when file is loaded in editor and &lt;selection&gt;&lt;/selection&gt;
   * denoting selection bounds.
   * @param filePath - relative path from %IDEA_INSTALLATION_HOME%/testData/
   * @throws Exception
   */
  protected void configureByFile(@TestDataFile @NonNls @Nonnull String filePath) {
    try {
      String fullPath = getTestDataPath() + filePath;

      final File ioFile = new File(fullPath);
      String fileText = FileUtil.loadFile(ioFile, CharsetToolkit.UTF8);
      fileText = StringUtil.convertLineSeparators(fileText);

      configureFromFileText(ioFile.getName(), fileText);
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

  // Root directory for test-data files; override to point elsewhere.
  @NonNls
  @Nonnull
  protected String getTestDataPath() {
    return TestPathUtil.getTestDataPath();
  }

  // Resolves a test-data-relative path to a VirtualFile, failing the test if absent.
  protected VirtualFile getVirtualFile(@NonNls String filePath) {
    String fullPath = getTestDataPath() + filePath;

    final VirtualFile vFile = LocalFileSystem.getInstance().findFileByPath(fullPath.replace(File.separatorChar, '/'));
    assertNotNull("file " + fullPath + " not found", vFile);
    return vFile;
  }

  /**
   * Same as configureByFile but text is provided directly.
   * @param fileName - name of the file.
   * @param fileText - data file text.
   * @throws java.io.IOException
   */
  @Nonnull
  protected static Document configureFromFileText(@NonNls @Nonnull final String fileName, @NonNls @Nonnull final String fileText) throws IOException {
    return ApplicationManager.getApplication().runWriteAction(new Computable<Document>() {
      @Override
      public Document compute() {
        if (myVFile != null) {
          // avoid messing with invalid files, in case someone calls configureXXX() several times
          PsiDocumentManager.getInstance(ourProject).commitAllDocuments();
          FileEditorManager.getInstance(ourProject).closeFile(myVFile);
          try {
            myVFile.delete(this);
          }
          catch (IOException e) {
            LOG.error(e);
          }
          myVFile = null;
        }
        // A scratch document is used to locate and strip the caret/selection
        // markers; RangeMarkers keep the offsets valid while the marker text
        // is deleted.
        final Document fakeDocument = new DocumentImpl(fileText);

        int caretIndex = fileText.indexOf(CARET_MARKER);
        int selStartIndex = fileText.indexOf(SELECTION_START_MARKER);
        int selEndIndex = fileText.indexOf(SELECTION_END_MARKER);

        final RangeMarker caretMarker = caretIndex >= 0 ? fakeDocument.createRangeMarker(caretIndex, caretIndex) : null;
        final RangeMarker selStartMarker = selStartIndex >= 0 ? fakeDocument.createRangeMarker(selStartIndex, selStartIndex) : null;
        final RangeMarker selEndMarker = selEndIndex >= 0 ? fakeDocument.createRangeMarker(selEndIndex, selEndIndex) : null;

        if (caretMarker != null) {
          fakeDocument.deleteString(caretMarker.getStartOffset(), caretMarker.getStartOffset() + CARET_MARKER.length());
        }
        if (selStartMarker != null) {
          fakeDocument.deleteString(selStartMarker.getStartOffset(), selStartMarker.getStartOffset() + SELECTION_START_MARKER.length());
        }
        if (selEndMarker != null) {
          fakeDocument.deleteString(selEndMarker.getStartOffset(), selEndMarker.getStartOffset() + SELECTION_END_MARKER.length());
        }

        String newFileText = fakeDocument.getText();
        Document document;
        try {
          document = setupFileEditorAndDocument(fileName, newFileText);
        }
        catch (IOException e) {
          throw new RuntimeException(e);
        }
        setupCaret(caretMarker, newFileText);
        setupSelection(selStartMarker, selEndMarker);
        setupEditorForInjectedLanguage();
        return document;
      }
    });
  }

  // Applies the parsed <selection> range to the freshly opened editor.
  private static void setupSelection(final RangeMarker selStartMarker, final RangeMarker selEndMarker) {
    if (selStartMarker != null) {
      myEditor.getSelectionModel().setSelection(selStartMarker.getStartOffset(), selEndMarker.getStartOffset());
    }
  }

  // Moves the caret to the logical position derived from the <caret> marker,
  // converting the offset to line/column with the configured Java tab size.
  private static void setupCaret(final RangeMarker caretMarker, String fileText) {
    if (caretMarker != null) {
      int caretLine = StringUtil.offsetToLineNumber(fileText, caretMarker.getStartOffset());
      int caretCol = EditorUtil.calcColumnNumber(null, myEditor.getDocument().getText(),
                                                 myEditor.getDocument().getLineStartOffset(caretLine),
                                                 caretMarker.getStartOffset(),
                                                 CodeStyleSettingsManager.getSettings(getProject()).getIndentOptions(InternalStdFileTypes.JAVA).TAB_SIZE);
      LogicalPosition pos = new LogicalPosition(caretLine, caretCol);
      myEditor.getCaretModel().moveToLogicalPosition(pos);
    }
  }

  // Opens a real text editor on the file (not focused) with an active caret.
  protected static Editor createEditor(@Nonnull VirtualFile file) {
    Editor editor = FileEditorManager.getInstance(getProject()).openTextEditor(new OpenFileDescriptor(getProject(), file, 0), false);
    ((DesktopEditorImpl)editor).setCaretActive();
    return editor;
  }

  // Creates the virtual file under the source root, loads it into a Document,
  // builds the PSI file and opens an editor on it; fills the static my* fields.
  @Nonnull
  private static Document setupFileEditorAndDocument(@Nonnull String fileName, @Nonnull String fileText) throws IOException {
    EncodingProjectManager.getInstance(getProject()).setEncoding(null, CharsetToolkit.UTF8_CHARSET);
    EncodingProjectManager.getInstance(ProjectManager.getInstance().getDefaultProject()).setEncoding(null, CharsetToolkit.UTF8_CHARSET);
    PostprocessReformattingAspect.getInstance(ourProject).doPostponedFormatting();
    deleteVFile();
    myVFile = getSourceRoot().createChildData(null, fileName);
    VfsUtil.saveText(myVFile, fileText);
    final FileDocumentManager manager = FileDocumentManager.getInstance();
    final Document document = manager.getDocument(myVFile);
    assertNotNull("Can't create document for '" + fileName + "'", document);
    manager.reloadFromDisk(document);
    // No-op edit: forces document/PSI listeners to initialize before the test runs.
    document.insertString(0, " ");
    document.deleteString(0, 1);
    myFile = getPsiManager().findFile(myVFile);
    assertNotNull("Can't create PsiFile for '" + fileName + "'. Unknown file type most probably.", myFile);
    assertTrue(myFile.isPhysical());
    myEditor = createEditor(myVFile);
    myVFile.setCharset(CharsetToolkit.UTF8_CHARSET);

    PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
    return document;
  }

  // If the caret landed inside an injected-language fragment, switch the
  // static editor/file references to the injected (window) editor and file.
  private static void setupEditorForInjectedLanguage() {
    Editor editor = InjectedLanguageUtil.getEditorForInjectedLanguageNoCommit(myEditor, myFile);
    if (editor instanceof EditorWindow) {
      myFile = ((EditorWindow)editor).getInjectedFile();
      myEditor = editor;
    }
  }

  // Deletes the previously created virtual file (if any) inside a write action.
  private static void deleteVFile() {
    if (myVFile != null) {
      ApplicationManager.getApplication().runWriteAction(new Runnable() {
        @Override
        public void run() {
          try {
            myVFile.delete(this);
          }
          catch (IOException e) {
            LOG.error(e);
          }
        }
      });
    }
  }

  /** Closes all open editors and clears the static per-test state. */
  @Override
  protected void tearDown() throws Exception {
    FileEditorManager editorManager = FileEditorManager.getInstance(getProject());
    VirtualFile[] openFiles = editorManager.getOpenFiles();
    for (VirtualFile openFile : openFiles) {
      editorManager.closeFile(openFile);
    }
    deleteVFile();
    myEditor = null;
    myFile = null;
    myVFile = null;
    super.tearDown();
  }

  /**
   * Validates that content of the editor as well as caret and selection matches one specified in data file that
   * should be formed with the same format as one used in configureByFile
   * @param filePath - relative path from %IDEA_INSTALLATION_HOME%/testData/
   * @throws Exception
   */
  protected void checkResultByFile(@NonNls @Nonnull String filePath) {
    checkResultByFile(null, filePath, false);
  }

  /**
   * Validates that content of the editor as well as caret and selection matches one specified in data file that
   * should be formed with the same format as one used in configureByFile
   * @param message - this check specific message. Added to text, caret position, selection checking. May be null
   * @param filePath - relative path from %IDEA_INSTALLATION_HOME%/testData/
   * @param ignoreTrailingSpaces - whether trailing spaces in editor in data file should be stripped prior to comparing.
   * @throws Exception
   */
  protected void checkResultByFile(@javax.annotation.Nullable String message, @Nonnull String filePath, final boolean ignoreTrailingSpaces) {
    bringRealEditorBack();

    getProject().getComponent(PostprocessReformattingAspect.class).doPostponedFormatting();
    if (ignoreTrailingSpaces) {
      final Editor editor = myEditor;
      TrailingSpacesStripper.strip(editor.getDocument(), false, true);
      EditorUtil.fillVirtualSpaceUntilCaret(editor);
    }

    PsiDocumentManager.getInstance(getProject()).commitAllDocuments();

    String fullPath = getTestDataPath() + filePath;

    File ioFile = new File(fullPath);

    assertTrue(getMessage("Cannot find file " + fullPath, message), ioFile.exists());
    String fileText = null;
    try {
      fileText = FileUtil.loadFile(ioFile, CharsetToolkit.UTF8);
    }
    catch (IOException e) {
      LOG.error(e);
    }
    checkResultByText(message, StringUtil.convertLineSeparators(fileText), ignoreTrailingSpaces, getTestDataPath() + "/" + filePath);
  }

  /**
   * Same as checkResultByFile but text is provided directly.
   * @param fileText
   */
  protected void checkResultByText(@NonNls @Nonnull String fileText) {
    checkResultByText(null, fileText, false, null);
  }

  /**
   * Same as checkResultByFile but text is provided directly.
   * @param message - this check specific message. Added to text, caret position, selection checking. May be null
   * @param fileText
   * @param ignoreTrailingSpaces - whether trailing spaces in editor in data file should be stripped prior to comparing.
   */
  protected void checkResultByText(final String message, @Nonnull String fileText, final boolean ignoreTrailingSpaces) {
    checkResultByText(message, fileText, ignoreTrailingSpaces, null);
  }

  /**
   * Same as checkResultByFile but text is provided directly.
   * @param message - this check specific message. Added to text, caret position, selection checking. May be null
   * @param fileText
   * @param ignoreTrailingSpaces - whether trailing spaces in editor in data file should be stripped prior to comparing.
   */
  protected void checkResultByText(final String message, @Nonnull final String fileText, final boolean ignoreTrailingSpaces, final String filePath) {
    bringRealEditorBack();
    PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        // Parse the expected text with the same marker-stripping technique
        // used by configureFromFileText().
        final Document document = EditorFactory.getInstance().createDocument(fileText);

        int caretIndex = fileText.indexOf(CARET_MARKER);
        int selStartIndex = fileText.indexOf(SELECTION_START_MARKER);
        int selEndIndex = fileText.indexOf(SELECTION_END_MARKER);

        final RangeMarker caretMarker = caretIndex >= 0 ? document.createRangeMarker(caretIndex, caretIndex) : null;
        final RangeMarker selStartMarker = selStartIndex >= 0 ? document.createRangeMarker(selStartIndex, selStartIndex) : null;
        final RangeMarker selEndMarker = selEndIndex >= 0 ? document.createRangeMarker(selEndIndex, selEndIndex) : null;

        if (ignoreTrailingSpaces) {
          ((DocumentImpl)document).stripTrailingSpaces(ourProject);
        }

        if (caretMarker != null) {
          document.deleteString(caretMarker.getStartOffset(), caretMarker.getStartOffset() + CARET_MARKER.length());
        }
        if (selStartMarker != null) {
          document.deleteString(selStartMarker.getStartOffset(), selStartMarker.getStartOffset() + SELECTION_START_MARKER.length());
        }
        if (selEndMarker != null) {
          document.deleteString(selEndMarker.getStartOffset(), selEndMarker.getStartOffset() + SELECTION_END_MARKER.length());
        }

        PostprocessReformattingAspect.getInstance(getProject()).doPostponedFormatting();
        String newFileText = document.getText();

        PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
        // Note: shadows the outer 'fileText' parameter with the actual editor content.
        String fileText = myFile.getText();
        String failMessage = getMessage("Text mismatch", message);
        if (filePath != null && !newFileText.equals(fileText)) {
          // FileComparisonFailure lets IDE test runners show a diff view.
          throw new FileComparisonFailure(failMessage, newFileText, fileText, filePath);
        }
        assertEquals(failMessage, newFileText, fileText);

        checkCaretPosition(caretMarker, newFileText, message);
        checkSelection(selStartMarker, selEndMarker, newFileText, message);
      }
    });
  }

  // Combines the framework message with the caller-supplied one, if any.
  private static String getMessage(@NonNls String engineMessage, String userMessage) {
    if (userMessage == null) return engineMessage;
    return userMessage + " [" + engineMessage + "]";
  }

  // Asserts the editor selection matches the expected marker range; all
  // comparisons use 1-based line/column numbers for readable failures.
  private static void checkSelection(final RangeMarker selStartMarker, final RangeMarker selEndMarker, String newFileText, String message) {
    if (selStartMarker != null && selEndMarker != null) {
      int selStartLine = StringUtil.offsetToLineNumber(newFileText, selStartMarker.getStartOffset());
      int selStartCol = selStartMarker.getStartOffset() - StringUtil.lineColToOffset(newFileText, selStartLine, 0);

      int selEndLine = StringUtil.offsetToLineNumber(newFileText, selEndMarker.getEndOffset());
      int selEndCol = selEndMarker.getEndOffset() - StringUtil.lineColToOffset(newFileText, selEndLine, 0);

      assertEquals(
          getMessage("selectionStartLine", message),
          selStartLine + 1,
          StringUtil.offsetToLineNumber(newFileText, myEditor.getSelectionModel().getSelectionStart()) + 1);

      assertEquals(
          getMessage("selectionStartCol", message),
          selStartCol + 1,
          myEditor.getSelectionModel().getSelectionStart() - StringUtil.lineColToOffset(newFileText, selStartLine, 0) + 1);

      assertEquals(
          getMessage("selectionEndLine", message),
          selEndLine + 1,
          StringUtil.offsetToLineNumber(newFileText, myEditor.getSelectionModel().getSelectionEnd()) + 1);

      assertEquals(
          getMessage("selectionEndCol", message),
          selEndCol + 1,
          myEditor.getSelectionModel().getSelectionEnd() - StringUtil.lineColToOffset(newFileText, selEndLine, 0) + 1);
    }
    else {
      assertTrue(getMessage("must not have selection", message), !myEditor.getSelectionModel().hasSelection());
    }
  }

  // Asserts the caret's logical position matches the expected marker offset,
  // using the Java tab size to compute the expected column.
  private static void checkCaretPosition(final RangeMarker caretMarker, String newFileText, String message) {
    if (caretMarker != null) {
      int caretLine = StringUtil.offsetToLineNumber(newFileText, caretMarker.getStartOffset());
      //int caretCol = caretMarker.getStartOffset() - StringUtil.lineColToOffset(newFileText, caretLine, 0);
      int caretCol = EditorUtil.calcColumnNumber(null, newFileText,
                                                 StringUtil.lineColToOffset(newFileText, caretLine, 0),
                                                 caretMarker.getStartOffset(),
                                                 CodeStyleSettingsManager.getSettings(getProject()).getIndentOptions(InternalStdFileTypes.JAVA).TAB_SIZE);

      assertEquals(getMessage("caretLine", message), caretLine, myEditor.getCaretModel().getLogicalPosition().line);
      assertEquals(getMessage("caretColumn", message), caretCol, myEditor.getCaretModel().getLogicalPosition().column);
    }
  }

  /**
   * Supplies editor/PSI data-context values for action execution in tests;
   * the injected-language variants resolve to the injected editor/file.
   * Keys are compared by identity ({@code ==}), which is how Key constants work.
   */
  @Override
  public Object getData(@Nonnull Key<?> dataId) {
    if (PlatformDataKeys.EDITOR == dataId) {
      return myEditor;
    }
    if (dataId == AnActionEvent.injectedId(PlatformDataKeys.EDITOR)) {
      return InjectedLanguageUtil.getEditorForInjectedLanguageNoCommit(getEditor(), getFile());
    }
    if (LangDataKeys.PSI_FILE == dataId) {
      return myFile;
    }
    if (dataId == AnActionEvent.injectedId(LangDataKeys.PSI_FILE)) {
      Editor editor = InjectedLanguageUtil.getEditorForInjectedLanguageNoCommit(getEditor(), getFile());
      return editor instanceof EditorWindow ? ((EditorWindow)editor).getInjectedFile() : getFile();
    }
    return super.getData(dataId);
  }

  /**
   * @return Editor used in test.
   */
  protected static Editor getEditor() {
    return myEditor;
  }

  /**
   * @return PsiFile opened in editor used in test
   */
  protected static PsiFile getFile() {
    return myFile;
  }

  // Virtual file backing the current test editor.
  protected static VirtualFile getVFile() {
    return myVFile;
  }

  // If the test switched to an injected-language editor, restore the static
  // references to the real host editor/document/file.
  protected static void bringRealEditorBack() {
    PsiDocumentManager.getInstance(getProject()).commitAllDocuments();
    if (myEditor instanceof EditorWindow) {
      Document document = ((DocumentWindow)myEditor.getDocument()).getDelegate();
      myFile = PsiDocumentManager.getInstance(getProject()).getPsiFile(document);
      myEditor = ((EditorWindow)myEditor).getDelegate();
      myVFile = myFile.getVirtualFile();
    }
  }

  // Executes the "move caret up" editor action against the test editor.
  protected void caretUp() {
    EditorActionManager actionManager = EditorActionManager.getInstance();
    EditorActionHandler action = actionManager.getActionHandler(IdeActions.ACTION_EDITOR_MOVE_CARET_UP);
    action.execute(getEditor(), DataManager.getInstance().getDataContext());
  }

  // Executes the "delete line" editor action against the test editor.
  protected void deleteLine() {
    EditorActionManager actionManager = EditorActionManager.getInstance();
    EditorActionHandler action = actionManager.getActionHandler(IdeActions.ACTION_EDITOR_DELETE_LINE);
    action.execute(getEditor(), DataManager.getInstance().getDataContext());
  }

  // Types a single character; '\n' and '\b' are routed to the enter/backspace
  // editor actions, everything else goes through the typed-action pipeline.
  protected static void type(char c) {
    EditorActionManager actionManager = EditorActionManager.getInstance();
    final DataContext dataContext = DataManager.getInstance().getDataContext();
    if (c == '\n') {
      actionManager.getActionHandler(IdeActions.ACTION_EDITOR_ENTER).execute(getEditor(), dataContext);
    }
    else if (c == '\b') {
      actionManager.getActionHandler(IdeActions.ACTION_EDITOR_BACKSPACE).execute(getEditor(), dataContext);
    }
    else {
      actionManager.getTypedAction().actionPerformed(getEditor(), c, dataContext);
    }
  }

  // Types a string character by character through type(char).
  protected static void type(@NonNls String s) {
    for (char c : s.toCharArray()) {
      type(c);
    }
  }

  // The following helpers each execute one named editor action on the test editor.
  protected static void backspace() {
    executeAction(IdeActions.ACTION_EDITOR_BACKSPACE);
  }

  protected static void delete() {
    executeAction(IdeActions.ACTION_EDITOR_DELETE);
  }

  protected static void home() {
    executeAction(IdeActions.ACTION_EDITOR_MOVE_LINE_START);
  }

  protected static void end() {
    executeAction(IdeActions.ACTION_EDITOR_MOVE_LINE_END);
  }

  protected static void copy() {
    executeAction(IdeActions.ACTION_EDITOR_COPY);
  }

  protected static void paste() {
    executeAction(IdeActions.ACTION_EDITOR_PASTE);
  }

  protected static void moveCaretToPreviousWordWithSelection() {
    executeAction("EditorPreviousWordWithSelection");
  }

  protected static void moveCaretToNextWordWithSelection() {
    executeAction("EditorNextWordWithSelection");
  }

  protected static void cutLineBackward() {
    executeAction("EditorCutLineBackward");
  }

  protected static void cutToLineEnd() {
    executeAction("EditorCutLineEnd");
  }

  protected static void killToWordStart() {
    executeAction("EditorKillToWordStart");
  }

  protected static void killToWordEnd() {
    executeAction("EditorKillToWordEnd");
  }

  protected static void killRegion() {
    executeAction("EditorKillRegion");
  }

  protected static void killRingSave() {
    executeAction("EditorKillRingSave");
  }

  protected static void unindent() {
    executeAction("EditorUnindentSelection");
  }

  // Invokes the line-comment handler directly (it is not a plain editor action).
  protected static void lineComment() {
    new CommentByLineCommentHandler().invoke(getProject(), getEditor(), getEditor().getCaretModel().getPrimaryCaret(), getFile());
  }

  // Executes an arbitrary editor action by id, wrapped in a command for undo grouping.
  protected static void executeAction(@NonNls @Nonnull final String actionId) {
    CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
      @Override
      public void run() {
        EditorActionManager actionManager = EditorActionManager.getInstance();
        EditorActionHandler actionHandler = actionManager.getActionHandler(actionId);

        actionHandler.execute(getEditor(), null, DataManager.getInstance().getDataContext());
      }
    }, "", null);
  }

  // Builds a DataContext that answers editor/project/file/element queries from
  // the current test state, delegating everything else to the default context.
  protected static DataContext getCurrentEditorDataContext() {
    final DataContext defaultContext = DataManager.getInstance().getDataContext();
    return new DataContext() {
      @Override
      @javax.annotation.Nullable
      public Object getData(@NonNls Key dataId) {
        if (PlatformDataKeys.EDITOR == dataId) {
          return getEditor();
        }
        if (CommonDataKeys.PROJECT == dataId) {
          return getProject();
        }
        if (LangDataKeys.PSI_FILE == dataId) {
          return getFile();
        }
        if (LangDataKeys.PSI_ELEMENT == dataId) {
          PsiFile file = getFile();
          if (file == null) return null;
          Editor editor = getEditor();
          if (editor == null) return null;
          return file.findElementAt(editor.getCaretModel().getOffset());
        }
        return defaultContext.getData(dataId);
      }
    };
  }
}
/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.andes.tools.messagestore.commands; import org.wso2.andes.AMQException; import org.wso2.andes.framing.AMQShortString; import org.wso2.andes.framing.BasicContentHeaderProperties; import org.wso2.andes.framing.FieldTable; import org.wso2.andes.framing.abstraction.MessagePublishInfo; import org.wso2.andes.server.message.AMQMessage; import org.wso2.andes.server.queue.AMQQueue; import org.wso2.andes.server.queue.QueueEntry; import org.wso2.andes.server.message.ServerMessage; import org.wso2.andes.tools.messagestore.MessageStoreTool; import org.wso2.andes.tools.utils.Console; import java.util.LinkedList; import java.util.List; public class Show extends AbstractCommand { protected boolean _amqHeaders = false; protected boolean _routing = false; protected boolean _msgHeaders = false; public Show(MessageStoreTool tool) { super(tool); } public String help() { return "Shows the messages headers."; } public String usage() { return getCommand() + " [show=[all],[msgheaders],[amqheaders],[routing]] [id=<msgid e.g. 1,2,4-10>]"; } public String getCommand() { return "show"; } public void execute(String... args) { assert args.length > 0; assert args[0].equals(getCommand()); if (args.length < 2) { parseArgs("all"); } else { parseArgs(args); } performShow(); } protected void parseArgs(String... 
args) { List<Long> msgids = null; if (args.length >= 2) { for (String arg : args) { if (arg.startsWith("show=")) { _msgHeaders = arg.contains("msgheaders") || arg.contains("all"); _amqHeaders = arg.contains("amqheaders") || arg.contains("all"); _routing = arg.contains("routing") || arg.contains("all"); } if (arg.startsWith("id=")) { _tool.getState().setMessages(msgids); } }//for args }// if args > 2 } protected void performShow() { if (_tool.getState().getVhost() == null) { _console.println("No Virtualhost selected. 'DuSelect' a Virtualhost first."); return; } AMQQueue _queue = _tool.getState().getQueue(); List<Long> msgids = _tool.getState().getMessages(); if (_queue != null) { List<QueueEntry> messages = _queue.getMessagesOnTheQueue(); if (messages == null || messages.size() == 0) { _console.println("No messages on queue"); return; } List<List> data = createMessageData(msgids, messages, _amqHeaders, _routing, _msgHeaders); if (data != null) { _console.printMap(null, data); } else { String message = "No data to display."; if (msgids != null) { message += " Is message selection correct? " + _tool.getState().printMessages(); } _console.println(message); } } else { _console.println("No Queue specified to show."); } } /** * Create the list data for display from the messages. * * @param msgids The list of message ids to display * @param messages A list of messages to format and display. 
* @param showHeaders should the header info be shown * @param showRouting show the routing info be shown * @param showMessageHeaders show the msg headers be shown * @return the formated data lists for printing */ protected List<List> createMessageData(List<Long> msgids, List<QueueEntry> messages, boolean showHeaders, boolean showRouting, boolean showMessageHeaders) { // Currenly exposed message properties // //Printing the content Body // msg.getContentBodyIterator(); // //Print the Headers // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getAppId(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getAppIdAsString(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getClusterId(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getContentType(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getCorrelationId(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getDeliveryMode(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getEncoding(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getExpiration(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getHeaders(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getMessageNumber(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getPriority(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getPropertyFlags(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getReplyTo(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getTimestamp(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getType(); // ((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getUserId(); // // //Print out all the property names // 
((BasicContentHeaderProperties)msg.getContentHeaderBody().properties).getHeaders().getPropertyNames(); // // msg.getMessageNumber(); // msg.getSize(); // msg.get_arrivalTime(); // msg.getDeliveredSubscription(); // msg.getDeliveredToConsumer(); // msg.getMessageHandle(); // msg.getMessageNumber(); // msg.getMessagePublishInfo(); // msg.getPublisher(); // msg.getStoreContext(); // msg.isAllContentReceived(); // msg.isPersistent(); // msg.isRedelivered(); // msg.isRejectedBy(); // msg.isTaken(); //Header setup List<List> data = new LinkedList<List>(); List<String> id = new LinkedList<String>(); data.add(id); id.add(Columns.ID.name()); id.add(Console.ROW_DIVIDER); List<String> exchange = new LinkedList<String>(); List<String> routingkey = new LinkedList<String>(); List<String> immediate = new LinkedList<String>(); List<String> mandatory = new LinkedList<String>(); if (showRouting) { data.add(exchange); exchange.add(Columns.Exchange.name()); exchange.add(Console.ROW_DIVIDER); data.add(routingkey); routingkey.add(Columns.RoutingKey.name()); routingkey.add(Console.ROW_DIVIDER); data.add(immediate); immediate.add(Columns.isImmediate.name()); immediate.add(Console.ROW_DIVIDER); data.add(mandatory); mandatory.add(Columns.isMandatory.name()); mandatory.add(Console.ROW_DIVIDER); } List<String> size = new LinkedList<String>(); List<String> appid = new LinkedList<String>(); List<String> clusterid = new LinkedList<String>(); List<String> contenttype = new LinkedList<String>(); List<String> correlationid = new LinkedList<String>(); List<String> deliverymode = new LinkedList<String>(); List<String> encoding = new LinkedList<String>(); List<String> arrival = new LinkedList<String>(); List<String> expiration = new LinkedList<String>(); List<String> priority = new LinkedList<String>(); List<String> propertyflag = new LinkedList<String>(); List<String> replyto = new LinkedList<String>(); List<String> timestamp = new LinkedList<String>(); List<String> type = new LinkedList<String>(); 
List<String> userid = new LinkedList<String>(); List<String> ispersitent = new LinkedList<String>(); List<String> isredelivered = new LinkedList<String>(); List<String> isdelivered = new LinkedList<String>(); data.add(size); size.add(Columns.Size.name()); size.add(Console.ROW_DIVIDER); if (showHeaders) { data.add(ispersitent); ispersitent.add(Columns.isPersistent.name()); ispersitent.add(Console.ROW_DIVIDER); data.add(isredelivered); isredelivered.add(Columns.isRedelivered.name()); isredelivered.add(Console.ROW_DIVIDER); data.add(isdelivered); isdelivered.add(Columns.isDelivered.name()); isdelivered.add(Console.ROW_DIVIDER); data.add(appid); appid.add(Columns.App_ID.name()); appid.add(Console.ROW_DIVIDER); data.add(clusterid); clusterid.add(Columns.Cluster_ID.name()); clusterid.add(Console.ROW_DIVIDER); data.add(contenttype); contenttype.add(Columns.Content_Type.name()); contenttype.add(Console.ROW_DIVIDER); data.add(correlationid); correlationid.add(Columns.Correlation_ID.name()); correlationid.add(Console.ROW_DIVIDER); data.add(deliverymode); deliverymode.add(Columns.Delivery_Mode.name()); deliverymode.add(Console.ROW_DIVIDER); data.add(encoding); encoding.add(Columns.Encoding.name()); encoding.add(Console.ROW_DIVIDER); data.add(arrival); expiration.add(Columns.Arrival.name()); expiration.add(Console.ROW_DIVIDER); data.add(expiration); expiration.add(Columns.Expiration.name()); expiration.add(Console.ROW_DIVIDER); data.add(priority); priority.add(Columns.Priority.name()); priority.add(Console.ROW_DIVIDER); data.add(propertyflag); propertyflag.add(Columns.Property_Flag.name()); propertyflag.add(Console.ROW_DIVIDER); data.add(replyto); replyto.add(Columns.ReplyTo.name()); replyto.add(Console.ROW_DIVIDER); data.add(timestamp); timestamp.add(Columns.Timestamp.name()); timestamp.add(Console.ROW_DIVIDER); data.add(type); type.add(Columns.Type.name()); type.add(Console.ROW_DIVIDER); data.add(userid); userid.add(Columns.UserID.name()); userid.add(Console.ROW_DIVIDER); } 
List<String> msgHeaders = new LinkedList<String>(); if (showMessageHeaders) { data.add(msgHeaders); msgHeaders.add(Columns.MsgHeaders.name()); msgHeaders.add(Console.ROW_DIVIDER); } //Add create the table of data for (QueueEntry entry : messages) { ServerMessage msg = entry.getMessage(); if (!includeMsg(msg, msgids)) { continue; } id.add(msg.getMessageNumber().toString()); size.add("" + msg.getSize()); arrival.add("" + msg.getArrivalTime()); ispersitent.add(msg.isPersistent() ? "true" : "false"); isredelivered.add(entry.isRedelivered() ? "true" : "false"); isdelivered.add(entry.getDeliveredToConsumer() ? "true" : "false"); // msg.getMessageHandle(); BasicContentHeaderProperties headers = null; try { if(msg instanceof AMQMessage) { headers = ((BasicContentHeaderProperties) ((AMQMessage)msg).getContentHeaderBody().getProperties()); } } catch (AMQException e) { //ignore // commandError("Unable to read properties for message: " + e.getMessage(), null); } if (headers != null) { String appidS = headers.getAppIdAsString(); appid.add(appidS == null ? "null" : appidS); String clusterS = headers.getClusterIdAsString(); clusterid.add(clusterS == null ? "null" : clusterS); String contentS = headers.getContentTypeAsString(); contenttype.add(contentS == null ? "null" : contentS); String correlationS = headers.getCorrelationIdAsString(); correlationid.add(correlationS == null ? "null" : correlationS); deliverymode.add("" + headers.getDeliveryMode()); AMQShortString encodeSS = headers.getEncoding(); encoding.add(encodeSS == null ? "null" : encodeSS.toString()); expiration.add("" + headers.getExpiration()); FieldTable headerFT = headers.getHeaders(); msgHeaders.add(headerFT == null ? "none" : "" + headerFT.toString()); priority.add("" + headers.getPriority()); propertyflag.add("" + headers.getPropertyFlags()); AMQShortString replytoSS = headers.getReplyTo(); replyto.add(replytoSS == null ? 
"null" : replytoSS.toString()); timestamp.add("" + headers.getTimestamp()); AMQShortString typeSS = headers.getType(); type.add(typeSS == null ? "null" : typeSS.toString()); AMQShortString useridSS = headers.getUserId(); userid.add(useridSS == null ? "null" : useridSS.toString()); MessagePublishInfo info = null; try { if(msg instanceof AMQMessage) { info = ((AMQMessage)msg).getMessagePublishInfo(); } } catch (AMQException e) { //ignore } if (info != null) { AMQShortString exchangeSS = info.getExchange(); exchange.add(exchangeSS == null ? "null" : exchangeSS.toString()); AMQShortString routingkeySS = info.getRoutingKey(); routingkey.add(routingkeySS == null ? "null" : routingkeySS.toString()); immediate.add(info.isImmediate() ? "true" : "false"); mandatory.add(info.isMandatory() ? "true" : "false"); } // msg.getPublisher(); -- only used in clustering // msg.getStoreContext(); // msg.isAllContentReceived(); }// if headers!=null // need to access internal map and do lookups. // msg.isTaken(); // msg.getDeliveredSubscription(); // msg.isRejectedBy(); } // if id only had the header and the divider in it then we have no data to display if (id.size() == 2) { return null; } return data; } protected boolean includeMsg(ServerMessage msg, List<Long> msgids) { if (msgids == null) { return true; } Long msgid = msg.getMessageNumber(); boolean found = false; if (msgids != null) { //check msgid is in msgids for (Long l : msgids) { if (l.equals(msgid)) { found = true; break; } } } return found; } public enum Columns { ID, Size, Exchange, RoutingKey, isImmediate, isMandatory, isPersistent, isRedelivered, isDelivered, App_ID, Cluster_ID, Content_Type, Correlation_ID, Delivery_Mode, Encoding, Arrival, Expiration, Priority, Property_Flag, ReplyTo, Timestamp, Type, UserID, MsgHeaders } }
package com.armandgray.shared.viewModel;

import android.util.Log;

import com.armandgray.shared.application.TAAPRepository;
import com.armandgray.shared.db.DatabaseManager;
import com.armandgray.shared.model.Drill;
import com.armandgray.shared.model.Performance;
import com.armandgray.shared.model.UXPreference;
import com.armandgray.shared.permission.DangerousPermission;
import com.armandgray.shared.permission.PermissionManager;
import com.armandgray.shared.rx.SchedulerProvider;
import com.armandgray.shared.sensors.GeneralSensorManager;
import com.armandgray.shared.sensors.LinearAccelerationAction;
import com.armandgray.shared.voice.VoiceEvent;
import com.armandgray.shared.voice.VoiceManager;

import java.util.List;
import java.util.concurrent.TimeUnit;

import javax.inject.Inject;
import javax.inject.Singleton;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.reactivex.Observable;
import io.reactivex.ObservableSource;
import io.reactivex.ObservableTransformer;
import io.reactivex.SingleObserver;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Function;
import io.reactivex.subjects.BehaviorSubject;
import io.reactivex.subjects.PublishSubject;

/**
 * Repository owning the drill/performance state streams: exposes hot
 * observables for the drill list, active drill, in-progress performance and
 * set completions; reacts to preference updates; and persists drills and
 * performances through {@link DatabaseManager}.
 */
@Singleton
class DrillRepository extends TAAPRepository {

    // Preference-backed settings, refreshed from preferenceConsumer().
    // NOTE(review): these are mutable statics on a @Singleton - shared across
    // any instance and not thread-confined; confirm that is intentional.
    private static int singleSetTimeout = UXPreference.Item.TIMEOUT.getDefault(true);

    private static boolean enableAutoTracking = UXPreference.Item.AUTO.isDefaultEnabled();

    private static boolean enableCallOut = UXPreference.Item.CALL_OUT.isDefaultEnabled();

    private static boolean enableClap = UXPreference.Item.CLAP.isDefaultEnabled();

    private static int voiceTimeout = UXPreference.Item.VOICE_TIMEOUT.getDefault(true);

    @Inject
    GeneralSensorManager generalSensorManager;

    @Inject
    VoiceManager voiceManager;

    @Inject
    PermissionManager permissionManager;

    // Hot state streams (BehaviorSubjects replay the latest value to new subscribers).
    @VisibleForTesting
    final BehaviorSubject<List<Drill>> drillsSubject = BehaviorSubject.create();

    @VisibleForTesting
    final BehaviorSubject<Drill> activeDrillSubject = BehaviorSubject.create();

    @VisibleForTesting
    final BehaviorSubject<Performance> performanceSubject = BehaviorSubject.create();

    // Fires once per completed set; PublishSubject so completions are not replayed.
    @VisibleForTesting
    final PublishSubject<Performance> completionSubject = PublishSubject.create();

    private final DatabaseManager databaseManager;

    private final SchedulerProvider schedulers;

    // Pending single-set timeout; disposed and replaced on every make/miss.
    @Nullable
    private Disposable setTimeoutDisposable;

    /**
     * Wires preference updates to {@link #preferenceConsumer(UXPreference)}
     * and loads all drills once the database reports ready.
     */
    @SuppressWarnings("ConstantConditions")
    @Inject
    DrillRepository(PreferencesRepository preferencesRepository,
                    DatabaseManager databaseManager,
                    SchedulerProvider schedulers) {
        this.databaseManager = databaseManager;
        this.schedulers = schedulers;

        super.disposables.add(preferencesRepository.getPreferenceUpdateObservable()
                .subscribe(this::preferenceConsumer));

        DatabaseManager.Wrapper.getDatabaseReady()
                .andThen(databaseManager.getDrillDao().all())
                .toObservable()
                .compose(schedulers.asyncTask())
                .subscribe(onDrillsRetrieved());
    }

    /**
     * Refreshes the cached setting fields from an updated preference, then
     * forwards drill-category preferences to {@link #onDrillPreferenceChanged}.
     */
    private void preferenceConsumer(UXPreference preference) {
        switch (preference.getCategory()) {
            case WORKOUT:
                singleSetTimeout = preference.getValue(UXPreference.Item.TIMEOUT, true);
                enableAutoTracking = preference.isEnabled(UXPreference.Item.AUTO);
                return;

            case VOICE:
                // NOTE(review): deliberate-looking fall-through into default
                // (no break), so VOICE updates also reach
                // onDrillPreferenceChanged(); that method guards on
                // isDrillCategory(), making the fall-through harmless - confirm
                // it is intended.
                enableCallOut = preference.isEnabled(UXPreference.Item.CALL_OUT);
                enableClap = preference.isEnabled(UXPreference.Item.CLAP);
                voiceTimeout = preference.getValue(UXPreference.Item.VOICE_TIMEOUT, true);

            default:
                onDrillPreferenceChanged(preference);
        }
    }

    /**
     * Persists the active drill after a drill-category preference change and
     * republishes a fresh Performance that carries over the current counts.
     */
    private void onDrillPreferenceChanged(UXPreference preference) {
        // Only drill-category preferences matter, and only once a drill is active.
        if (!preference.getCategory().isDrillCategory()
                || activeDrillSubject.getValue() == null) {
            return;
        }

        Performance performance = performanceSubject.getValue();
        Drill drill = activeDrillSubject.getValue();
        Performance update = new Performance(drill);

        databaseManager.getDrillDao().update(drill).subscribe(new SingleObserver<Integer>() {
            @Override
            public void onSubscribe(Disposable d) {
                disposables.add(d);
            }

            @Override
            public void onSuccess(Integer integer) {
                Log.d(TAG, "Drill Update Success: " + drill);
            }

            @Override
            public void onError(Throwable e) {
                Log.e(TAG, "Drill Update Failed: " + drill);
            }
        });

        // Carry the in-flight counts into the replacement Performance.
        if (performance != null) {
            update.setCount(performance.getCount());
            update.setTotal(performance.getTotal());
            update.setStartTime(performance.getStartTime());
        }

        setPerformanceValue(update);
    }

    /** Observer that pushes the freshly loaded drill list to all subjects. */
    private RepositoryObserver<List<Drill>> onDrillsRetrieved() {
        return new RepositoryObserver<List<Drill>>() {
            @Override
            public void onNext(List<Drill> list) {
                updateDrillSubscribers(list);
            }
        };
    }

    /**
     * Publishes the drill list, selects the default drill as active and seeds
     * a new Performance for it.
     */
    @VisibleForTesting
    void updateDrillSubscribers(List<Drill> list) {
        // NOTE(review): .get() assumes the default drill is always present in
        // the loaded list; a missing default would throw NoSuchElementException.
        @SuppressWarnings("ConstantConditions")
        Drill drill = list.stream().filter(Drill.Defaults.getDefault()::equals).findFirst().get();

        drillsSubject.onNext(list);
        activeDrillSubject.onNext(drill);
        performanceSubject.onNext(new Performance(drill));
    }

    Observable<List<Drill>> getDrillsObservable() {
        return drillsSubject;
    }

    Observable<Drill> getActiveDrillObservable() {
        return activeDrillSubject;
    }

    Observable<Performance> getPerformanceObservable() {
        return performanceSubject;
    }

    Observable<Performance> getCompletionObservable() {
        return completionSubject;
    }

    /** Sensor-driven tracking events, or a single NONE when auto-tracking is off. */
    Observable<LinearAccelerationAction> getAutoTrackingObservable() {
        // TODO Fix enabling race condition with preference update
        return enableAutoTracking
                ? generalSensorManager.getAutoTrackingObservable()
                : Observable.just(LinearAccelerationAction.NONE);
    }

    /** Voice events, suppressed while auto-tracking is enabled. */
    Observable<VoiceEvent> getVoiceEventObservable() {
        // Disable Non-Triggered Events while Auto Tracking
        return !enableAutoTracking
                ? getTriggeredVoiceEventObservable()
                : Observable.just(VoiceEvent.NONE);
    }

    /**
     * Voice events gated on the microphone permission. Emits NONE when both
     * voice features are disabled.
     */
    Observable<VoiceEvent> getTriggeredVoiceEventObservable() {
        if (!enableCallOut && !enableClap) {
            return Observable.just(VoiceEvent.NONE);
        }

        DangerousPermission permission = DangerousPermission.MICROPHONE;
        Observable<Boolean> permissionObservable = permissionManager.usePermission(permission);

        if (enableClap && enableCallOut) {
            // TODO implement amb for first event
            return permissionObservable
                    .concatMap(concatPermissionOverVoiceEvent(voiceManager.getClapObservable()))
                    .compose(schedulers.asyncTask());

        } else if (enableCallOut) {
            return permissionObservable
                    .concatMap(concatPermissionOverVoiceEvent(voiceManager.getCallOutObservable()))
                    .compose(schedulers.asyncTask());

        } else {
            // NOTE(review): this branch schedules via looper()/ui() instead of
            // compose(asyncTask()) like the two above - confirm the asymmetry
            // is intentional.
            return permissionObservable
                    .concatMap(concatPermissionOverVoiceEvent(voiceManager.getClapObservable()))
                    .subscribeOn(schedulers.looper())
                    .observeOn(schedulers.ui());
        }
    }

    /**
     * Maps any voice-event timeout to a VoiceEvent.TIMEOUT emission.
     * NOTE(review): not referenced anywhere in this class - presumably meant
     * to be applied in getTriggeredVoiceEventObservable(); verify before
     * removing or wiring it in.
     */
    private ObservableTransformer<VoiceEvent, VoiceEvent> applyVoiceTimeout() {
        return observable -> observable
                .timeout(voiceTimeout, TimeUnit.MILLISECONDS)
                .onErrorReturnItem(VoiceEvent.TIMEOUT);
    }

    /**
     * Gates a voice-event stream on a permission result: with permission the
     * stream is passed through, otherwise MISSING_PERMISSION is emitted.
     */
    private Function<Boolean, ObservableSource<VoiceEvent>> concatPermissionOverVoiceEvent(
            Observable<VoiceEvent> observable) {
        return hasPermission -> {
            VoiceEvent missingPermission = VoiceEvent.MISSING_PERMISSION;
            return hasPermission ? observable : Observable.just(missingPermission);
        };
    }

    /** Records a made shot: count and total both increase, set timer restarts. */
    void addMake() {
        Performance performance = this.performanceSubject.getValue();
        if (performance == null) {
            return;
        }

        performance.raiseCount();
        performance.raiseTotal();
        updateSetTimeout();
        setPerformanceValue(performance);
    }

    /** Records a missed shot: only the total increases, set timer restarts. */
    void addMiss() {
        Performance performance = this.performanceSubject.getValue();
        if (performance == null) {
            return;
        }

        performance.raiseTotal();
        updateSetTimeout();
        setPerformanceValue(performance);
    }

    /**
     * Restarts the inactivity timer; when it fires, the current set is
     * treated as complete.
     */
    private void updateSetTimeout() {
        // Cancel any previously scheduled timeout before arming a new one.
        if (setTimeoutDisposable != null) {
            setTimeoutDisposable.dispose();
        }

        setTimeoutDisposable = Observable.timer(singleSetTimeout, TimeUnit.MILLISECONDS)
                .subscribeOn(schedulers.io())
                .observeOn(schedulers.ui())
                .subscribe(timeout -> handleSetCompletion(performanceSubject.getValue()));
    }

    /** Resets the current performance's counters and republishes it. */
    void clearPerformance() {
        Performance performance = this.performanceSubject.getValue();
        if (performance == null) {
            return;
        }

        performance.clear();
        setPerformanceValue(performance);
    }

    /**
     * Publishes the performance; completes the set automatically once the
     * configured rep target is reached.
     */
    private void setPerformanceValue(Performance performance) {
        performanceSubject.onNext(performance);
        if (performance.getTotal() >= performance.getReps()) {
            handleSetCompletion(performance);
        }
    }

    /**
     * Switches the active drill; the performance in progress (if any) is
     * completed, otherwise a fresh one is seeded for the new drill.
     */
    void setActiveDrill(@NonNull Drill drill) {
        activeDrillSubject.onNext(drill);

        if (performanceSubject.getValue() == null) {
            performanceSubject.onNext(new Performance(drill));
        } else {
            handleSetCompletion(performanceSubject.getValue());
        }
    }

    /**
     * Finishes a set: republishes a fresh Performance for the active drill
     * first, then - only when shots were actually taken - stamps the end
     * time, notifies completion subscribers and persists the finished set.
     * Empty sets (total == 0) are dropped silently.
     */
    private void handleSetCompletion(Performance performance) {
        //noinspection ConstantConditions
        performanceSubject.onNext(new Performance(activeDrillSubject.getValue()));

        if (performance.getTotal() > 0) {
            performance.captureEndTime();
            completionSubject.onNext(performance);
            storePerformance(performance);
        }
    }

    /** Inserts a finished performance into the database, logging the outcome. */
    private void storePerformance(Performance performance) {
        databaseManager.getPerformanceDao().insert(performance).subscribe(new SingleObserver<List<Long>>() {
            @Override
            public void onSubscribe(Disposable d) {
                disposables.add(d);
            }

            @Override
            public void onSuccess(List<Long> rowIds) {
                Log.d(TAG, "Performance Insertion Success: " + performance);
            }

            @Override
            public void onError(Throwable e) {
                Log.e(TAG, "Performance Insertion Failed: " + performance);
            }
        });
    }
}
/** * * Copyright (c) Microsoft and contributors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ // Warning: This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if the // code is regenerated. package com.microsoft.windowsazure.management.network; import com.microsoft.windowsazure.core.OperationStatus; import com.microsoft.windowsazure.core.OperationStatusResponse; import com.microsoft.windowsazure.core.ServiceOperations; import com.microsoft.windowsazure.core.utils.BOMInputStream; import com.microsoft.windowsazure.core.utils.XmlUtility; import com.microsoft.windowsazure.exception.CloudError; import com.microsoft.windowsazure.exception.ServiceException; import com.microsoft.windowsazure.management.network.models.IPForwardingGetResponse; import com.microsoft.windowsazure.management.network.models.IPForwardingSetParameters; import com.microsoft.windowsazure.tracing.ClientRequestTrackingHandler; import com.microsoft.windowsazure.tracing.CloudTracing; import org.apache.http.HttpResponse; import org.apache.http.HttpStatus; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import 
javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.IOException;
import java.io.InputStream;
import java.io.StringWriter;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

/**
 * The Network Management API includes operations for managing the IP Forwarding
 * for your roles and network interfaces in your subscription.
 *
 * <p>Tool-generated code (see file header): hand edits will be lost when the
 * client is regenerated.</p>
 */
public class IPForwardingOperationsImpl implements ServiceOperations<NetworkManagementClientImpl>, IPForwardingOperations {
    /**
     * Initializes a new instance of the IPForwardingOperationsImpl class.
     *
     * @param client Reference to the service client.
     */
    IPForwardingOperationsImpl(NetworkManagementClientImpl client) {
        this.client = client;
    }

    // Back-reference to the service client providing the HTTP client,
    // credentials, base URI and executor used by every operation below.
    private NetworkManagementClientImpl client;

    /**
     * Gets a reference to the
     * microsoft.windowsazure.management.network.NetworkManagementClientImpl.
     * @return The Client value.
     */
    public NetworkManagementClientImpl getClient() {
        return this.client;
    }

    /**
     * Sets IP Forwarding on a network interface.
     *
     * @param serviceName Required.
     * @param deploymentName Required.
     * @param roleName Required.
     * @param networkInterfaceName Required.
     * @param parameters Required. Parameters supplied to the Set IP Forwarding
     * on network interface operation.
     * @return The response body contains the status of the specified
     * asynchronous operation, indicating whether it has succeeded, is
     * inprogress, or has failed. Note that this status is distinct from the
     * HTTP status code returned for the Get Operation Status operation itself.
     * If the asynchronous operation succeeded, the response body includes the
     * HTTP status code for the successful request. If the asynchronous
     * operation failed, the response body includes the HTTP status code for
     * the failed request, and also includes error information regarding the
     * failure.
     */
    @Override
    public Future<OperationStatusResponse> beginSettingIPForwardingOnNetworkInterfaceAsync(final String serviceName, final String deploymentName, final String roleName, final String networkInterfaceName, final IPForwardingSetParameters parameters) {
        // Delegates to the blocking implementation on the client's executor.
        return this.getClient().getExecutorService().submit(new Callable<OperationStatusResponse>() {
            @Override
            public OperationStatusResponse call() throws Exception {
                return beginSettingIPForwardingOnNetworkInterface(serviceName, deploymentName, roleName, networkInterfaceName, parameters);
            }
        });
    }

    /**
     * Sets IP Forwarding on a network interface.
     *
     * @param serviceName Required.
     * @param deploymentName Required.
     * @param roleName Required.
     * @param networkInterfaceName Required.
     * @param parameters Required. Parameters supplied to the Set IP Forwarding
     * on network interface operation.
     * @throws ParserConfigurationException Thrown if there was an error
     * configuring the parser for the response body.
     * @throws SAXException Thrown if there was an error parsing the response
     * body.
     * @throws TransformerException Thrown if there was an error creating the
     * DOM transformer.
     * @throws IOException Signals that an I/O exception of some sort has
     * occurred. This class is the general class of exceptions produced by
     * failed or interrupted I/O operations.
     * @throws ServiceException Thrown if an unexpected response is found.
     * @return The response body contains the status of the specified
     * asynchronous operation, indicating whether it has succeeded, is
     * inprogress, or has failed. Note that this status is distinct from the
     * HTTP status code returned for the Get Operation Status operation itself.
     * If the asynchronous operation succeeded, the response body includes the
     * HTTP status code for the successful request.
If the asynchronous
     * operation failed, the response body includes the HTTP status code for
     * the failed request, and also includes error information regarding the
     * failure.
     */
    @Override
    public OperationStatusResponse beginSettingIPForwardingOnNetworkInterface(String serviceName, String deploymentName, String roleName, String networkInterfaceName, IPForwardingSetParameters parameters) throws ParserConfigurationException, SAXException, TransformerException, IOException, ServiceException {
        // Validate
        if (serviceName == null) {
            throw new NullPointerException("serviceName");
        }
        if (deploymentName == null) {
            throw new NullPointerException("deploymentName");
        }
        if (roleName == null) {
            throw new NullPointerException("roleName");
        }
        if (networkInterfaceName == null) {
            throw new NullPointerException("networkInterfaceName");
        }
        if (parameters == null) {
            throw new NullPointerException("parameters");
        }
        if (parameters.getState() == null) {
            throw new NullPointerException("parameters.State");
        }

        // Tracing
        boolean shouldTrace = CloudTracing.getIsEnabled();
        String invocationId = null;
        if (shouldTrace) {
            invocationId = Long.toString(CloudTracing.getNextInvocationId());
            HashMap<String, Object> tracingParameters = new HashMap<String, Object>();
            tracingParameters.put("serviceName", serviceName);
            tracingParameters.put("deploymentName", deploymentName);
            tracingParameters.put("roleName", roleName);
            tracingParameters.put("networkInterfaceName", networkInterfaceName);
            tracingParameters.put("parameters", parameters);
            CloudTracing.enter(invocationId, this, "beginSettingIPForwardingOnNetworkInterfaceAsync", tracingParameters);
        }

        // Construct URL:
        // /<subscription>/services/hostedservices/<service>/deployments/<deployment>
        //   /roles/<role>/networkinterfaces/<nic>/ipforwarding
        String url = "";
        url = url + "/";
        if (this.getClient().getCredentials().getSubscriptionId() != null) {
            url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8");
        }
        url = url + "/services/hostedservices/";
        url = url + URLEncoder.encode(serviceName, "UTF-8");
        url = url + "/deployments/";
        url = url + URLEncoder.encode(deploymentName, "UTF-8");
        url = url + "/roles/";
        url = url + URLEncoder.encode(roleName, "UTF-8");
        url = url + "/networkinterfaces/";
        url = url + URLEncoder.encode(networkInterfaceName, "UTF-8");
        url = url + "/ipforwarding";
        String baseUrl = this.getClient().getBaseUri().toString();
        // Trim '/' character from the end of baseUrl and beginning of url.
        if (baseUrl.charAt(baseUrl.length() - 1) == '/') {
            // "+ 0" is a code-generator artifact; the call simply drops the trailing '/'.
            baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0);
        }
        if (url.charAt(0) == '/') {
            url = url.substring(1);
        }
        url = baseUrl + "/" + url;
        url = url.replace(" ", "%20");

        // Create HTTP transport objects
        HttpPost httpRequest = new HttpPost(url);

        // Set Headers
        httpRequest.setHeader("Content-Type", "application/xml");
        httpRequest.setHeader("x-ms-version", "2015-04-01");

        // Serialize Request: <IPForwarding><State>...</State></IPForwarding>
        String requestContent = null;
        DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
        DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder();
        Document requestDoc = documentBuilder.newDocument();

        Element iPForwardingElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "IPForwarding");
        requestDoc.appendChild(iPForwardingElement);

        Element stateElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "State");
        stateElement.appendChild(requestDoc.createTextNode(parameters.getState()));
        iPForwardingElement.appendChild(stateElement);

        DOMSource domSource = new DOMSource(requestDoc);
        StringWriter stringWriter = new StringWriter();
        StreamResult streamResult = new StreamResult(stringWriter);
        TransformerFactory transformerFactory = TransformerFactory.newInstance();
        Transformer transformer = transformerFactory.newTransformer();
        transformer.transform(domSource, streamResult);
        requestContent = stringWriter.toString();
        StringEntity entity = new StringEntity(requestContent);
        httpRequest.setEntity(entity);
        httpRequest.setHeader("Content-Type", "application/xml");

        // Send Request
        HttpResponse httpResponse = null;
        try {
            if (shouldTrace) {
                CloudTracing.sendRequest(invocationId, httpRequest);
            }
            httpResponse = this.getClient().getHttpClient().execute(httpRequest);
            if (shouldTrace) {
                CloudTracing.receiveResponse(invocationId, httpResponse);
            }
            int statusCode = httpResponse.getStatusLine().getStatusCode();
            // Only 202 Accepted is a success for this long-running operation.
            if (statusCode != HttpStatus.SC_ACCEPTED) {
                ServiceException ex = ServiceException.createFromXml(httpRequest, requestContent, httpResponse, httpResponse.getEntity());
                if (shouldTrace) {
                    CloudTracing.error(invocationId, ex);
                }
                throw ex;
            }

            // Create Result
            OperationStatusResponse result = null;
            // Deserialize Response
            result = new OperationStatusResponse();
            result.setStatusCode(statusCode);
            if (httpResponse.getHeaders("x-ms-request-id").length > 0) {
                result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue());
            }

            if (shouldTrace) {
                CloudTracing.exit(invocationId, result);
            }
            return result;
        } finally {
            // Always release the HTTP connection by draining/closing the entity stream.
            if (httpResponse != null && httpResponse.getEntity() != null) {
                httpResponse.getEntity().getContent().close();
            }
        }
    }

    /**
     * Sets IP Forwarding on a role.
     *
     * @param serviceName Required.
     * @param deploymentName Required.
     * @param roleName Required.
     * @param parameters Required. Parameters supplied to the Set IP Forwarding
     * on role operation.
     * @return The response body contains the status of the specified
     * asynchronous operation, indicating whether it has succeeded, is
     * inprogress, or has failed. Note that this status is distinct from the
     * HTTP status code returned for the Get Operation Status operation itself.
     * If the asynchronous operation succeeded, the response body includes the
     * HTTP status code for the successful request. If the asynchronous
     * operation failed, the response body includes the HTTP status code for
     * the failed request, and also includes error information regarding the
     * failure.
*/
    @Override
    public Future<OperationStatusResponse> beginSettingIPForwardingOnRoleAsync(final String serviceName, final String deploymentName, final String roleName, final IPForwardingSetParameters parameters) {
        // Delegates to the blocking role variant on the client's executor.
        return this.getClient().getExecutorService().submit(new Callable<OperationStatusResponse>() {
            @Override
            public OperationStatusResponse call() throws Exception {
                return beginSettingIPForwardingOnRole(serviceName, deploymentName, roleName, parameters);
            }
        });
    }

    /**
     * Sets IP Forwarding on a role.
     *
     * @param serviceName Required.
     * @param deploymentName Required.
     * @param roleName Required.
     * @param parameters Required. Parameters supplied to the Set IP Forwarding
     * on role operation.
     * @throws ParserConfigurationException Thrown if there was an error
     * configuring the parser for the response body.
     * @throws SAXException Thrown if there was an error parsing the response
     * body.
     * @throws TransformerException Thrown if there was an error creating the
     * DOM transformer.
     * @throws IOException Signals that an I/O exception of some sort has
     * occurred. This class is the general class of exceptions produced by
     * failed or interrupted I/O operations.
     * @throws ServiceException Thrown if an unexpected response is found.
     * @return The response body contains the status of the specified
     * asynchronous operation, indicating whether it has succeeded, is
     * inprogress, or has failed. Note that this status is distinct from the
     * HTTP status code returned for the Get Operation Status operation itself.
     * If the asynchronous operation succeeded, the response body includes the
     * HTTP status code for the successful request. If the asynchronous
     * operation failed, the response body includes the HTTP status code for
     * the failed request, and also includes error information regarding the
     * failure.
*/ @Override public OperationStatusResponse beginSettingIPForwardingOnRole(String serviceName, String deploymentName, String roleName, IPForwardingSetParameters parameters) throws ParserConfigurationException, SAXException, TransformerException, IOException, ServiceException { // Validate if (serviceName == null) { throw new NullPointerException("serviceName"); } if (deploymentName == null) { throw new NullPointerException("deploymentName"); } if (roleName == null) { throw new NullPointerException("roleName"); } if (parameters == null) { throw new NullPointerException("parameters"); } if (parameters.getState() == null) { throw new NullPointerException("parameters.State"); } // Tracing boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put("serviceName", serviceName); tracingParameters.put("deploymentName", deploymentName); tracingParameters.put("roleName", roleName); tracingParameters.put("parameters", parameters); CloudTracing.enter(invocationId, this, "beginSettingIPForwardingOnRoleAsync", tracingParameters); } // Construct URL String url = ""; url = url + "/"; if (this.getClient().getCredentials().getSubscriptionId() != null) { url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8"); } url = url + "/services/hostedservices/"; url = url + URLEncoder.encode(serviceName, "UTF-8"); url = url + "/deployments/"; url = url + URLEncoder.encode(deploymentName, "UTF-8"); url = url + "/roles/"; url = url + URLEncoder.encode(roleName, "UTF-8"); url = url + "/ipforwarding"; String baseUrl = this.getClient().getBaseUri().toString(); // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl.charAt(baseUrl.length() - 1) == '/') { baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0); } if (url.charAt(0) == '/') { url = url.substring(1); } url = baseUrl + "/" + url; url = url.replace(" ", "%20"); // Create HTTP transport objects HttpPost httpRequest = new HttpPost(url); // Set Headers httpRequest.setHeader("Content-Type", "application/xml"); httpRequest.setHeader("x-ms-version", "2015-04-01"); // Serialize Request String requestContent = null; DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); Document requestDoc = documentBuilder.newDocument(); Element iPForwardingElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "IPForwarding"); requestDoc.appendChild(iPForwardingElement); Element stateElement = requestDoc.createElementNS("http://schemas.microsoft.com/windowsazure", "State"); stateElement.appendChild(requestDoc.createTextNode(parameters.getState())); iPForwardingElement.appendChild(stateElement); DOMSource domSource = new DOMSource(requestDoc); StringWriter stringWriter = new StringWriter(); StreamResult streamResult = new StreamResult(stringWriter); TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer transformer = transformerFactory.newTransformer(); transformer.transform(domSource, streamResult); requestContent = stringWriter.toString(); StringEntity entity = new StringEntity(requestContent); httpRequest.setEntity(entity); httpRequest.setHeader("Content-Type", "application/xml"); // Send Request HttpResponse httpResponse = null; try { if (shouldTrace) { CloudTracing.sendRequest(invocationId, httpRequest); } httpResponse = this.getClient().getHttpClient().execute(httpRequest); if (shouldTrace) { CloudTracing.receiveResponse(invocationId, httpResponse); } int statusCode = httpResponse.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_ACCEPTED) 
{ ServiceException ex = ServiceException.createFromXml(httpRequest, requestContent, httpResponse, httpResponse.getEntity()); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } // Create Result OperationStatusResponse result = null; // Deserialize Response result = new OperationStatusResponse(); result.setStatusCode(statusCode); if (httpResponse.getHeaders("x-ms-request-id").length > 0) { result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue()); } if (shouldTrace) { CloudTracing.exit(invocationId, result); } return result; } finally { if (httpResponse != null && httpResponse.getEntity() != null) { httpResponse.getEntity().getContent().close(); } } } /** * Gets the IP Forwarding applied to a network interface. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @param networkInterfaceName Required. * @return The IP Forwarding state associated with a role or network * interface. */ @Override public Future<IPForwardingGetResponse> getForNetworkInterfaceAsync(final String serviceName, final String deploymentName, final String roleName, final String networkInterfaceName) { return this.getClient().getExecutorService().submit(new Callable<IPForwardingGetResponse>() { @Override public IPForwardingGetResponse call() throws Exception { return getForNetworkInterface(serviceName, deploymentName, roleName, networkInterfaceName); } }); } /** * Gets the IP Forwarding applied to a network interface. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @param networkInterfaceName Required. * @throws IOException Signals that an I/O exception of some sort has * occurred. This class is the general class of exceptions produced by * failed or interrupted I/O operations. * @throws ServiceException Thrown if an unexpected response is found. * @throws ParserConfigurationException Thrown if there was a serious * configuration error with the document parser. 
* @throws SAXException Thrown if there was an error parsing the XML * response. * @return The IP Forwarding state associated with a role or network * interface. */ @Override public IPForwardingGetResponse getForNetworkInterface(String serviceName, String deploymentName, String roleName, String networkInterfaceName) throws IOException, ServiceException, ParserConfigurationException, SAXException { // Validate if (serviceName == null) { throw new NullPointerException("serviceName"); } if (deploymentName == null) { throw new NullPointerException("deploymentName"); } if (roleName == null) { throw new NullPointerException("roleName"); } if (networkInterfaceName == null) { throw new NullPointerException("networkInterfaceName"); } // Tracing boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put("serviceName", serviceName); tracingParameters.put("deploymentName", deploymentName); tracingParameters.put("roleName", roleName); tracingParameters.put("networkInterfaceName", networkInterfaceName); CloudTracing.enter(invocationId, this, "getForNetworkInterfaceAsync", tracingParameters); } // Construct URL String url = ""; url = url + "/"; if (this.getClient().getCredentials().getSubscriptionId() != null) { url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8"); } url = url + "/services/hostedservices/"; url = url + URLEncoder.encode(serviceName, "UTF-8"); url = url + "/deployments/"; url = url + URLEncoder.encode(deploymentName, "UTF-8"); url = url + "/roles/"; url = url + URLEncoder.encode(roleName, "UTF-8"); url = url + "/networkinterfaces/"; url = url + URLEncoder.encode(networkInterfaceName, "UTF-8"); url = url + "/ipforwarding"; String baseUrl = this.getClient().getBaseUri().toString(); // Trim '/' character from the end of baseUrl 
and beginning of url. if (baseUrl.charAt(baseUrl.length() - 1) == '/') { baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0); } if (url.charAt(0) == '/') { url = url.substring(1); } url = baseUrl + "/" + url; url = url.replace(" ", "%20"); // Create HTTP transport objects HttpGet httpRequest = new HttpGet(url); // Set Headers httpRequest.setHeader("x-ms-version", "2015-04-01"); // Send Request HttpResponse httpResponse = null; try { if (shouldTrace) { CloudTracing.sendRequest(invocationId, httpRequest); } httpResponse = this.getClient().getHttpClient().execute(httpRequest); if (shouldTrace) { CloudTracing.receiveResponse(invocationId, httpResponse); } int statusCode = httpResponse.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { ServiceException ex = ServiceException.createFromXml(httpRequest, null, httpResponse, httpResponse.getEntity()); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } // Create Result IPForwardingGetResponse result = null; // Deserialize Response if (statusCode == HttpStatus.SC_OK) { InputStream responseContent = httpResponse.getEntity().getContent(); result = new IPForwardingGetResponse(); DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); Document responseDoc = documentBuilder.parse(new BOMInputStream(responseContent)); Element iPForwardingElement = XmlUtility.getElementByTagNameNS(responseDoc, "http://schemas.microsoft.com/windowsazure", "IPForwarding"); if (iPForwardingElement != null) { Element stateElement = XmlUtility.getElementByTagNameNS(iPForwardingElement, "http://schemas.microsoft.com/windowsazure", "State"); if (stateElement != null) { String stateInstance; stateInstance = stateElement.getTextContent(); result.setState(stateInstance); } } } result.setStatusCode(statusCode); if 
(httpResponse.getHeaders("x-ms-request-id").length > 0) { result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue()); } if (shouldTrace) { CloudTracing.exit(invocationId, result); } return result; } finally { if (httpResponse != null && httpResponse.getEntity() != null) { httpResponse.getEntity().getContent().close(); } } } /** * Gets the IP Forwarding applied to a role. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @return The IP Forwarding state associated with a role or network * interface. */ @Override public Future<IPForwardingGetResponse> getForRoleAsync(final String serviceName, final String deploymentName, final String roleName) { return this.getClient().getExecutorService().submit(new Callable<IPForwardingGetResponse>() { @Override public IPForwardingGetResponse call() throws Exception { return getForRole(serviceName, deploymentName, roleName); } }); } /** * Gets the IP Forwarding applied to a role. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @throws IOException Signals that an I/O exception of some sort has * occurred. This class is the general class of exceptions produced by * failed or interrupted I/O operations. * @throws ServiceException Thrown if an unexpected response is found. * @throws ParserConfigurationException Thrown if there was a serious * configuration error with the document parser. * @throws SAXException Thrown if there was an error parsing the XML * response. * @return The IP Forwarding state associated with a role or network * interface. 
*/ @Override public IPForwardingGetResponse getForRole(String serviceName, String deploymentName, String roleName) throws IOException, ServiceException, ParserConfigurationException, SAXException { // Validate if (serviceName == null) { throw new NullPointerException("serviceName"); } if (deploymentName == null) { throw new NullPointerException("deploymentName"); } if (roleName == null) { throw new NullPointerException("roleName"); } // Tracing boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put("serviceName", serviceName); tracingParameters.put("deploymentName", deploymentName); tracingParameters.put("roleName", roleName); CloudTracing.enter(invocationId, this, "getForRoleAsync", tracingParameters); } // Construct URL String url = ""; url = url + "/"; if (this.getClient().getCredentials().getSubscriptionId() != null) { url = url + URLEncoder.encode(this.getClient().getCredentials().getSubscriptionId(), "UTF-8"); } url = url + "/services/hostedservices/"; url = url + URLEncoder.encode(serviceName, "UTF-8"); url = url + "/deployments/"; url = url + URLEncoder.encode(deploymentName, "UTF-8"); url = url + "/roles/"; url = url + URLEncoder.encode(roleName, "UTF-8"); url = url + "/ipforwarding"; String baseUrl = this.getClient().getBaseUri().toString(); // Trim '/' character from the end of baseUrl and beginning of url. 
if (baseUrl.charAt(baseUrl.length() - 1) == '/') { baseUrl = baseUrl.substring(0, (baseUrl.length() - 1) + 0); } if (url.charAt(0) == '/') { url = url.substring(1); } url = baseUrl + "/" + url; url = url.replace(" ", "%20"); // Create HTTP transport objects HttpGet httpRequest = new HttpGet(url); // Set Headers httpRequest.setHeader("x-ms-version", "2015-04-01"); // Send Request HttpResponse httpResponse = null; try { if (shouldTrace) { CloudTracing.sendRequest(invocationId, httpRequest); } httpResponse = this.getClient().getHttpClient().execute(httpRequest); if (shouldTrace) { CloudTracing.receiveResponse(invocationId, httpResponse); } int statusCode = httpResponse.getStatusLine().getStatusCode(); if (statusCode != HttpStatus.SC_OK) { ServiceException ex = ServiceException.createFromXml(httpRequest, null, httpResponse, httpResponse.getEntity()); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } // Create Result IPForwardingGetResponse result = null; // Deserialize Response if (statusCode == HttpStatus.SC_OK) { InputStream responseContent = httpResponse.getEntity().getContent(); result = new IPForwardingGetResponse(); DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); DocumentBuilder documentBuilder = documentBuilderFactory.newDocumentBuilder(); Document responseDoc = documentBuilder.parse(new BOMInputStream(responseContent)); Element iPForwardingElement = XmlUtility.getElementByTagNameNS(responseDoc, "http://schemas.microsoft.com/windowsazure", "IPForwarding"); if (iPForwardingElement != null) { Element stateElement = XmlUtility.getElementByTagNameNS(iPForwardingElement, "http://schemas.microsoft.com/windowsazure", "State"); if (stateElement != null) { String stateInstance; stateInstance = stateElement.getTextContent(); result.setState(stateInstance); } } } result.setStatusCode(statusCode); if (httpResponse.getHeaders("x-ms-request-id").length > 0) { 
result.setRequestId(httpResponse.getFirstHeader("x-ms-request-id").getValue()); } if (shouldTrace) { CloudTracing.exit(invocationId, result); } return result; } finally { if (httpResponse != null && httpResponse.getEntity() != null) { httpResponse.getEntity().getContent().close(); } } } /** * Sets IP Forwarding on a network interface. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @param networkInterfaceName Required. * @param parameters Required. Parameters supplied to the Set IP Forwarding * on network interface operation. * @return The response body contains the status of the specified * asynchronous operation, indicating whether it has succeeded, is * inprogress, or has failed. Note that this status is distinct from the * HTTP status code returned for the Get Operation Status operation itself. * If the asynchronous operation succeeded, the response body includes the * HTTP status code for the successful request. If the asynchronous * operation failed, the response body includes the HTTP status code for * the failed request, and also includes error information regarding the * failure. */ @Override public Future<OperationStatusResponse> setOnNetworkInterfaceAsync(final String serviceName, final String deploymentName, final String roleName, final String networkInterfaceName, final IPForwardingSetParameters parameters) { return this.getClient().getExecutorService().submit(new Callable<OperationStatusResponse>() { @Override public OperationStatusResponse call() throws Exception { return setOnNetworkInterface(serviceName, deploymentName, roleName, networkInterfaceName, parameters); } }); } /** * Sets IP Forwarding on a network interface. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @param networkInterfaceName Required. * @param parameters Required. Parameters supplied to the Set IP Forwarding * on network interface operation. 
* @throws InterruptedException Thrown when a thread is waiting, sleeping, * or otherwise occupied, and the thread is interrupted, either before or * during the activity. Occasionally a method may wish to test whether the * current thread has been interrupted, and if so, to immediately throw * this exception. The following code can be used to achieve this effect: * @throws ExecutionException Thrown when attempting to retrieve the result * of a task that aborted by throwing an exception. This exception can be * inspected using the Throwable.getCause() method. * @throws ServiceException Thrown if the server returned an error for the * request. * @throws IOException Thrown if there was an error setting up tracing for * the request. * @return The response body contains the status of the specified * asynchronous operation, indicating whether it has succeeded, is * inprogress, or has failed. Note that this status is distinct from the * HTTP status code returned for the Get Operation Status operation itself. * If the asynchronous operation succeeded, the response body includes the * HTTP status code for the successful request. If the asynchronous * operation failed, the response body includes the HTTP status code for * the failed request, and also includes error information regarding the * failure. 
*/ @Override public OperationStatusResponse setOnNetworkInterface(String serviceName, String deploymentName, String roleName, String networkInterfaceName, IPForwardingSetParameters parameters) throws InterruptedException, ExecutionException, ServiceException, IOException { NetworkManagementClient client2 = this.getClient(); boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put("serviceName", serviceName); tracingParameters.put("deploymentName", deploymentName); tracingParameters.put("roleName", roleName); tracingParameters.put("networkInterfaceName", networkInterfaceName); tracingParameters.put("parameters", parameters); CloudTracing.enter(invocationId, this, "setOnNetworkInterfaceAsync", tracingParameters); } try { if (shouldTrace) { client2 = this.getClient().withRequestFilterLast(new ClientRequestTrackingHandler(invocationId)).withResponseFilterLast(new ClientRequestTrackingHandler(invocationId)); } OperationStatusResponse response = client2.getIPForwardingOperations().beginSettingIPForwardingOnNetworkInterfaceAsync(serviceName, deploymentName, roleName, networkInterfaceName, parameters).get(); if (response.getStatus() == OperationStatus.Succeeded) { return response; } OperationStatusResponse result = client2.getOperationStatusAsync(response.getRequestId()).get(); int delayInSeconds = 30; if (client2.getLongRunningOperationInitialTimeout() >= 0) { delayInSeconds = client2.getLongRunningOperationInitialTimeout(); } while (result.getStatus() != null && result.getStatus().equals(OperationStatus.InProgress)) { Thread.sleep(delayInSeconds * 1000); result = client2.getOperationStatusAsync(response.getRequestId()).get(); delayInSeconds = 30; if (client2.getLongRunningOperationRetryTimeout() >= 0) { delayInSeconds = client2.getLongRunningOperationRetryTimeout(); } } 
if (shouldTrace) { CloudTracing.exit(invocationId, result); } if (result.getStatus() != OperationStatus.Succeeded) { if (result.getError() != null) { ServiceException ex = new ServiceException(result.getError().getCode() + " : " + result.getError().getMessage()); ex.setError(new CloudError()); ex.getError().setCode(result.getError().getCode()); ex.getError().setMessage(result.getError().getMessage()); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } else { ServiceException ex = new ServiceException(""); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } } return result; } finally { if (client2 != null && shouldTrace) { client2.close(); } } } /** * Sets IP Forwarding on a role. * * @param serviceName Required. * @param deploymentName Required. * @param roleName Required. * @param parameters Required. Parameters supplied to the Set IP Forwarding * on role operation. * @return The response body contains the status of the specified * asynchronous operation, indicating whether it has succeeded, is * inprogress, or has failed. Note that this status is distinct from the * HTTP status code returned for the Get Operation Status operation itself. * If the asynchronous operation succeeded, the response body includes the * HTTP status code for the successful request. If the asynchronous * operation failed, the response body includes the HTTP status code for * the failed request, and also includes error information regarding the * failure. */ @Override public Future<OperationStatusResponse> setOnRoleAsync(final String serviceName, final String deploymentName, final String roleName, final IPForwardingSetParameters parameters) { return this.getClient().getExecutorService().submit(new Callable<OperationStatusResponse>() { @Override public OperationStatusResponse call() throws Exception { return setOnRole(serviceName, deploymentName, roleName, parameters); } }); } /** * Sets IP Forwarding on a role. * * @param serviceName Required. 
* @param deploymentName Required. * @param roleName Required. * @param parameters Required. Parameters supplied to the Set IP Forwarding * on role operation. * @throws InterruptedException Thrown when a thread is waiting, sleeping, * or otherwise occupied, and the thread is interrupted, either before or * during the activity. Occasionally a method may wish to test whether the * current thread has been interrupted, and if so, to immediately throw * this exception. The following code can be used to achieve this effect: * @throws ExecutionException Thrown when attempting to retrieve the result * of a task that aborted by throwing an exception. This exception can be * inspected using the Throwable.getCause() method. * @throws ServiceException Thrown if the server returned an error for the * request. * @throws IOException Thrown if there was an error setting up tracing for * the request. * @return The response body contains the status of the specified * asynchronous operation, indicating whether it has succeeded, is * inprogress, or has failed. Note that this status is distinct from the * HTTP status code returned for the Get Operation Status operation itself. * If the asynchronous operation succeeded, the response body includes the * HTTP status code for the successful request. If the asynchronous * operation failed, the response body includes the HTTP status code for * the failed request, and also includes error information regarding the * failure. 
*/ @Override public OperationStatusResponse setOnRole(String serviceName, String deploymentName, String roleName, IPForwardingSetParameters parameters) throws InterruptedException, ExecutionException, ServiceException, IOException { NetworkManagementClient client2 = this.getClient(); boolean shouldTrace = CloudTracing.getIsEnabled(); String invocationId = null; if (shouldTrace) { invocationId = Long.toString(CloudTracing.getNextInvocationId()); HashMap<String, Object> tracingParameters = new HashMap<String, Object>(); tracingParameters.put("serviceName", serviceName); tracingParameters.put("deploymentName", deploymentName); tracingParameters.put("roleName", roleName); tracingParameters.put("parameters", parameters); CloudTracing.enter(invocationId, this, "setOnRoleAsync", tracingParameters); } try { if (shouldTrace) { client2 = this.getClient().withRequestFilterLast(new ClientRequestTrackingHandler(invocationId)).withResponseFilterLast(new ClientRequestTrackingHandler(invocationId)); } OperationStatusResponse response = client2.getIPForwardingOperations().beginSettingIPForwardingOnRoleAsync(serviceName, deploymentName, roleName, parameters).get(); if (response.getStatus() == OperationStatus.Succeeded) { return response; } OperationStatusResponse result = client2.getOperationStatusAsync(response.getRequestId()).get(); int delayInSeconds = 30; if (client2.getLongRunningOperationInitialTimeout() >= 0) { delayInSeconds = client2.getLongRunningOperationInitialTimeout(); } while (result.getStatus() != null && result.getStatus().equals(OperationStatus.InProgress)) { Thread.sleep(delayInSeconds * 1000); result = client2.getOperationStatusAsync(response.getRequestId()).get(); delayInSeconds = 30; if (client2.getLongRunningOperationRetryTimeout() >= 0) { delayInSeconds = client2.getLongRunningOperationRetryTimeout(); } } if (shouldTrace) { CloudTracing.exit(invocationId, result); } if (result.getStatus() != OperationStatus.Succeeded) { if (result.getError() != null) { 
ServiceException ex = new ServiceException(result.getError().getCode() + " : " + result.getError().getMessage()); ex.setError(new CloudError()); ex.getError().setCode(result.getError().getCode()); ex.getError().setMessage(result.getError().getMessage()); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } else { ServiceException ex = new ServiceException(""); if (shouldTrace) { CloudTracing.error(invocationId, ex); } throw ex; } } return result; } finally { if (client2 != null && shouldTrace) { client2.close(); } } } }