gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Copyright 2008 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.template.soy.data.restricted;

import com.google.common.collect.Lists;
import com.google.template.soy.data.SoyData;
import com.google.template.soy.data.SoyDataException;
import com.google.template.soy.data.SoyListData;
import com.google.template.soy.data.SoyMapData;

import java.util.List;

/**
 * Abstract superclass for a node in a Soy data tree that represents a collection of data (i.e. an
 * internal node).
 *
 * <p> Important: This class may only be used in implementing plugins (e.g. functions, directives).
 */
public abstract class CollectionData extends SoyData {

  // ------------ put() ------------

  /**
   * Convenience function to put multiple mappings in one call.
   * @param data The mappings to put, as alternating keys/values. Indices 0, 2, 4, ... must be valid
   *     key strings. Indices 1, 3, 5, ... must be valid Soy data values.
   * @throws SoyDataException When attempting to add an invalid varargs list or a mapping containing
   *     an invalid key.
   */
  public void put(Object... data) {
    // TODO: Perhaps change to only convert varargs to Map, and do put(Map) elsewhere.
    if (data.length % 2 != 0) {
      throw new SoyDataException(
          "Varargs to put(...) must have an even number of arguments (key-value pairs).");
    }
    for (int i = 0; i < data.length; i += 2) {
      try {
        put((String) data[i], SoyData.createFromExistingData(data[i + 1]));
      } catch (ClassCastException cce) {
        // CCE implies data[i] was non-null and not a String, so getClass() is safe here.
        throw new SoyDataException(
            "Attempting to add a mapping containing a non-string key (key type "
            + data[i].getClass().getName() + ").");
      }
    }
  }

  /**
   * Puts data into this data tree at the specified key string, creating intermediate maps/lists
   * along the path as needed.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   * @throws SoyDataException If an intermediate node on the path exists but is not a collection.
   */
  public void put(String keyStr, SoyData value) {
    List<String> keys = split(keyStr, '.');
    int numKeys = keys.size();

    CollectionData collectionData = this;
    for (int i = 0; i <= numKeys - 2; ++i) {
      SoyData nextSoyData = collectionData.getSingle(keys.get(i));
      if (nextSoyData != null && !(nextSoyData instanceof CollectionData)) {
        // A leaf value is already stored where we need an internal node.
        throw new SoyDataException(
            "Failed to evaluate key string \"" + keyStr + "\" for put().");
      }
      CollectionData nextCollectionData = (CollectionData) nextSoyData;

      if (nextCollectionData == null) {
        // Create the SoyData object that will be bound to keys.get(i). We need to check the first
        // part of keys[i+1] to know whether to create a SoyMapData or SoyListData (checking the
        // first char is sufficient).
        nextCollectionData =
            (Character.isDigit(keys.get(i + 1).charAt(0))) ? new SoyListData() : new SoyMapData();
        collectionData.putSingle(keys.get(i), nextCollectionData);
      }
      collectionData = nextCollectionData;
    }

    collectionData.putSingle(keys.get(numKeys - 1), ensureValidValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, boolean value) {
    put(keyStr, BooleanData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, int value) {
    put(keyStr, IntegerData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, double value) {
    put(keyStr, FloatData.forValue(value));
  }

  /**
   * Puts data into this data tree at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @param value The data to put at the specified location.
   */
  public void put(String keyStr, String value) {
    put(keyStr, StringData.forValue(value));
  }

  // ------------ remove() ------------

  /**
   * Removes the data at the specified key string. A no-op if the path does not resolve to an
   * existing parent collection.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   */
  public void remove(String keyStr) {
    List<String> keys = split(keyStr, '.');
    CollectionData collectionData = getParentCollection(keys);
    if (collectionData == null) {
      return;  // some intermediate node is missing or not a collection: nothing to remove
    }
    collectionData.removeSingle(keys.get(keys.size() - 1));
  }

  // ------------ get*() ------------

  /**
   * Gets the data at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The data at the specified key string, or null if there's no data at the location.
   */
  public SoyData get(String keyStr) {
    List<String> keys = split(keyStr, '.');
    CollectionData collectionData = getParentCollection(keys);
    return (collectionData == null) ? null : collectionData.getSingle(keys.get(keys.size() - 1));
  }

  /**
   * Precondition: The specified key string is the path to a SoyMapData object.
   * Gets the SoyMapData at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The SoyMapData at the specified key string, or null if no data is stored there.
   */
  public SoyMapData getMapData(String keyStr) {
    return (SoyMapData) get(keyStr);
  }

  /**
   * Precondition: The specified key string is the path to a SoyListData object.
   * Gets the SoyListData at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The SoyListData at the specified key string, or null if no data is stored there.
   */
  public SoyListData getListData(String keyStr) {
    return (SoyListData) get(keyStr);
  }

  /**
   * Precondition: The specified key string is the path to a boolean.
   * Gets the boolean at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The boolean at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public boolean getBoolean(String keyStr) {
    SoyData valueData = get(keyStr);
    if (valueData == null) {
      throw new IllegalArgumentException("Missing key: " + keyStr);
    }
    return valueData.booleanValue();
  }

  /**
   * Precondition: The specified key string is the path to an integer.
   * Gets the integer at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The integer at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public int getInteger(String keyStr) {
    SoyData valueData = get(keyStr);
    if (valueData == null) {
      throw new IllegalArgumentException("Missing key: " + keyStr);
    }
    return valueData.integerValue();
  }

  /**
   * Precondition: The specified key string is the path to a float.
   * Gets the float at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The float at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public double getFloat(String keyStr) {
    SoyData valueData = get(keyStr);
    if (valueData == null) {
      throw new IllegalArgumentException("Missing key: " + keyStr);
    }
    return valueData.floatValue();
  }

  /**
   * Precondition: The specified key string is the path to a string.
   * Gets the string at the specified key string.
   * @param keyStr One or more map keys and/or list indices (separated by '.' if multiple parts).
   *     Indicates the path to the location within this data tree.
   * @return The string at the specified key string.
   * @throws IllegalArgumentException If no data is stored at the specified key.
   */
  public String getString(String keyStr) {
    SoyData valueData = get(keyStr);
    if (valueData == null) {
      throw new IllegalArgumentException("Missing key: " + keyStr);
    }
    return valueData.stringValue();
  }

  // -----------------------------------------------------------------------------------------------
  // Superpackage-private methods.

  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Puts data into this data object at the specified key.
   * @param key An individual key.
   * @param value The data to put at the specified key.
   */
  public abstract void putSingle(String key, SoyData value);

  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Removes the data at the specified key.
   * @param key An individual key.
   */
  public abstract void removeSingle(String key);

  /**
   * Important: Do not use outside of Soy code (treat as superpackage-private).
   *
   * Gets the data at the specified key.
   * @param key An individual key.
   * @return The data at the specified key, or null if the key is not defined.
   */
  public abstract SoyData getSingle(String key);

  // -----------------------------------------------------------------------------------------------
  // Protected/private helpers.

  /**
   * Ensures that the given value is valid for insertion into a Soy data tree. If the value is not
   * null, then simply returns it, else return NullData.
   * @param value The value to ensure validity for.
   * @return The given value if it's not null, or NullData if it is null.
   */
  protected static SoyData ensureValidValue(SoyData value) {
    return (value != null) ? value : NullData.INSTANCE;
  }

  /**
   * Descends this data tree along all keys except the last, returning the collection that would
   * directly contain the final key. Shared traversal for {@link #get} and {@link #remove}.
   * Note: {@code instanceof} is null-safe, so no separate null check is needed.
   * @param keys The split key path (must be non-empty).
   * @return The parent collection, or null if an intermediate node is missing or is not itself a
   *     collection.
   */
  private CollectionData getParentCollection(List<String> keys) {
    CollectionData collectionData = this;
    for (int i = 0; i <= keys.size() - 2; ++i) {
      SoyData soyData = collectionData.getSingle(keys.get(i));
      if (!(soyData instanceof CollectionData)) {
        return null;
      }
      collectionData = (CollectionData) soyData;
    }
    return collectionData;
  }

  /**
   * Splits a string into tokens at the specified delimiter.
   * @param str The string to split. Must not be null.
   * @param delim The delimiter character.
   * @return A list of tokens. Will not return null.
   */
  private static List<String> split(String str, char delim) {
    List<String> result = Lists.newArrayList();

    int currPartStart = 0;
    while (true) {
      int currPartEnd = str.indexOf(delim, currPartStart);
      if (currPartEnd == -1) {
        result.add(str.substring(currPartStart));
        break;
      } else {
        result.add(str.substring(currPartStart, currPartEnd));
        currPartStart = currPartEnd + 1;
      }
    }

    return result;
  }

}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.globant.mobile.handson.util; import java.io.BufferedInputStream; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Reader; import java.io.StringWriter; import java.io.Writer; import java.lang.reflect.Array; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; /** ****************************************************************************** * Taken from the JB source code, can be found in: * libcore/luni/src/main/java/libcore/io/DiskLruCache.java * or direct link: * https://android.googlesource.com/platform/libcore/+/android-4.1.1_r1/luni/src/main/java/libcore/io/DiskLruCache.java 
****************************************************************************** * * A cache that uses a bounded amount of space on a filesystem. Each cache * entry has a string key and a fixed number of values. Values are byte * sequences, accessible as streams or files. Each value must be between {@code * 0} and {@code Integer.MAX_VALUE} bytes in length. * * <p>The cache stores its data in a directory on the filesystem. This * directory must be exclusive to the cache; the cache may delete or overwrite * files from its directory. It is an error for multiple processes to use the * same cache directory at the same time. * * <p>This cache limits the number of bytes that it will store on the * filesystem. When the number of stored bytes exceeds the limit, the cache will * remove entries in the background until the limit is satisfied. The limit is * not strict: the cache may temporarily exceed it while waiting for files to be * deleted. The limit does not include filesystem overhead or the cache * journal so space-sensitive applications should set a conservative limit. * * <p>Clients call {@link #edit} to create or update the values of an entry. An * entry may have only one editor at one time; if a value is not available to be * edited then {@link #edit} will return null. * <ul> * <li>When an entry is being <strong>created</strong> it is necessary to * supply a full set of values; the empty value should be used as a * placeholder if necessary. * <li>When an entry is being <strong>edited</strong>, it is not necessary * to supply data for every value; values default to their previous * value. * </ul> * Every {@link #edit} call must be matched by a call to {@link Editor#commit} * or {@link Editor#abort}. Committing is atomic: a read observes the full set * of values as they were before or after the commit, but never a mix of values. * * <p>Clients call {@link #get} to read a snapshot of an entry. The read will * observe the value at the time that {@link #get} was called. 
 * Updates and removals after the call do not impact ongoing reads.
 *
 * <p>This class is tolerant of some I/O errors. If files are missing from the
 * filesystem, the corresponding entries will be dropped from the cache. If
 * an error occurs while writing a cache value, the edit will fail silently.
 * Callers should handle other problems by catching {@code IOException} and
 * responding appropriately.
 */
public final class DiskLruCache implements Closeable {
    static final String JOURNAL_FILE = "journal";
    static final String JOURNAL_FILE_TMP = "journal.tmp";
    static final String MAGIC = "libcore.io.DiskLruCache";
    static final String VERSION_1 = "1";
    static final long ANY_SEQUENCE_NUMBER = -1;

    // Journal record opcodes; exactly one of these starts every journal line.
    private static final String CLEAN = "CLEAN";
    private static final String DIRTY = "DIRTY";
    private static final String REMOVE = "REMOVE";
    private static final String READ = "READ";

    private static final Charset UTF_8 = Charset.forName("UTF-8");
    private static final int IO_BUFFER_SIZE = 8 * 1024;

    /*
     * This cache uses a journal file named "journal". A typical journal file
     * looks like this:
     *     libcore.io.DiskLruCache
     *     1
     *     100
     *     2
     *
     *     CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054
     *     DIRTY 335c4c6028171cfddfbaae1a9c313c52
     *     CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342
     *     REMOVE 335c4c6028171cfddfbaae1a9c313c52
     *     DIRTY 1ab96a171faeeee38496d8b330771a7a
     *     CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234
     *     READ 335c4c6028171cfddfbaae1a9c313c52
     *     READ 3400330d1dfc7f3f7f4b8d4d803dfcf6
     *
     * The first five lines of the journal form its header. They are the
     * constant string "libcore.io.DiskLruCache", the disk cache's version,
     * the application's version, the value count, and a blank line.
     *
     * Each of the subsequent lines in the file is a record of the state of a
     * cache entry. Each line contains space-separated values: a state, a key,
     * and optional state-specific values.
     *   o DIRTY lines track that an entry is actively being created or updated.
     *     Every successful DIRTY action should be followed by a CLEAN or REMOVE
     *     action. DIRTY lines without a matching CLEAN or REMOVE indicate that
     *     temporary files may need to be deleted.
     *   o CLEAN lines track a cache entry that has been successfully published
     *     and may be read. A publish line is followed by the lengths of each of
     *     its values.
     *   o READ lines track accesses for LRU.
     *   o REMOVE lines track entries that have been deleted.
     *
     * The journal file is appended to as cache operations occur. The journal may
     * occasionally be compacted by dropping redundant lines. A temporary file named
     * "journal.tmp" will be used during compaction; that file should be deleted if
     * it exists when the cache is opened.
     */
    private final File directory;
    private final File journalFile;
    private final File journalFileTmp;
    private final int appVersion;
    private final long maxSize;       // byte budget; eviction runs when 'size' exceeds it
    private final int valueCount;     // number of values stored per cache entry
    private long size = 0;            // total bytes of all published (CLEAN) values
    private Writer journalWriter;     // null once the cache has been closed
    // accessOrder=true makes iteration order the LRU order, so the first entry
    // returned by the entry-set iterator is always the eviction candidate.
    private final LinkedHashMap<String, Entry> lruEntries
            = new LinkedHashMap<String, Entry>(0, 0.75f, true);
    private int redundantOpCount;     // journal lines that a rebuild would drop

    /**
     * To differentiate between old and current snapshots, each entry is given
     * a sequence number each time an edit is committed. A snapshot is stale if
     * its sequence number is not equal to its entry's sequence number.
     */
    private long nextSequenceNumber = 0;

    /* From java.util.Arrays */
    @SuppressWarnings("unchecked")
    private static <T> T[] copyOfRange(T[] original, int start, int end) {
        final int originalLength = original.length; // For exception priority compatibility.
        if (start > end) {
            throw new IllegalArgumentException();
        }
        if (start < 0 || start > originalLength) {
            throw new ArrayIndexOutOfBoundsException();
        }
        final int resultLength = end - start;
        final int copyLength = Math.min(resultLength, originalLength - start);
        final T[] result = (T[]) Array
                .newInstance(original.getClass().getComponentType(), resultLength);
        System.arraycopy(original, start, result, 0, copyLength);
        return result;
    }

    /**
     * Returns the remainder of 'reader' as a string, closing it when done.
     */
    public static String readFully(Reader reader) throws IOException {
        try {
            StringWriter writer = new StringWriter();
            char[] buffer = new char[1024];
            int count;
            while ((count = reader.read(buffer)) != -1) {
                writer.write(buffer, 0, count);
            }
            return writer.toString();
        } finally {
            reader.close();
        }
    }

    /**
     * Returns the ASCII characters up to but not including the next "\r\n", or
     * "\n".
     *
     * @throws java.io.EOFException if the stream is exhausted before the next newline
     *     character.
     */
    public static String readAsciiLine(InputStream in) throws IOException {
        // TODO: support UTF-8 here instead
        StringBuilder result = new StringBuilder(80);
        while (true) {
            int c = in.read();
            if (c == -1) {
                throw new EOFException();
            } else if (c == '\n') {
                break;
            }

            result.append((char) c);
        }
        // Strip a trailing '\r' so both "\r\n" and "\n" line endings are accepted.
        int length = result.length();
        if (length > 0 && result.charAt(length - 1) == '\r') {
            result.setLength(length - 1);
        }
        return result.toString();
    }

    /**
     * Closes 'closeable', ignoring any checked exceptions. Does nothing if 'closeable' is null.
     */
    public static void closeQuietly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (RuntimeException rethrown) {
                throw rethrown; // programming errors still propagate
            } catch (Exception ignored) {
                // checked exceptions from close() are deliberately swallowed
            }
        }
    }

    /**
     * Recursively delete everything in {@code dir}.
            throw new IOException("unexpected journal line: " + line);
        }

        String key = parts[1];
        if (parts[0].equals(REMOVE) && parts.length == 2) {
            lruEntries.remove(key);
            return;
        }

        Entry entry = lruEntries.get(key);
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        }

        if (parts[0].equals(CLEAN) && parts.length == 2 + valueCount) {
            // Published entry: remaining tokens are the per-value byte lengths.
            entry.readable = true;
            entry.currentEditor = null;
            entry.setLengths(copyOfRange(parts, 2, parts.length));
        } else if (parts[0].equals(DIRTY) && parts.length == 2) {
            entry.currentEditor = new Editor(entry);
        } else if (parts[0].equals(READ) && parts.length == 2) {
            // this work was already done by calling lruEntries.get()
        } else {
            throw new IOException("unexpected journal line: " + line);
        }
    }

    /**
     * Computes the initial size and collects garbage as a part of opening the
     * cache. Dirty entries are assumed to be inconsistent and will be deleted.
     */
    private void processJournal() throws IOException {
        deleteIfExists(journalFileTmp);
        for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) {
            Entry entry = i.next();
            if (entry.currentEditor == null) {
                // Published entry: count its bytes toward the cache size.
                for (int t = 0; t < valueCount; t++) {
                    size += entry.lengths[t];
                }
            } else {
                // Entry was mid-edit when we last ran: discard it entirely.
                entry.currentEditor = null;
                for (int t = 0; t < valueCount; t++) {
                    deleteIfExists(entry.getCleanFile(t));
                    deleteIfExists(entry.getDirtyFile(t));
                }
                i.remove();
            }
        }
    }

    /**
     * Creates a new journal that omits redundant information. This replaces the
     * current journal if it exists.
     */
    private synchronized void rebuildJournal() throws IOException {
        if (journalWriter != null) {
            journalWriter.close();
        }

        // Write the compacted journal to journal.tmp, then atomically rename over "journal".
        Writer writer = new BufferedWriter(new FileWriter(journalFileTmp), IO_BUFFER_SIZE);
        writer.write(MAGIC);
        writer.write("\n");
        writer.write(VERSION_1);
        writer.write("\n");
        writer.write(Integer.toString(appVersion));
        writer.write("\n");
        writer.write(Integer.toString(valueCount));
        writer.write("\n");
        writer.write("\n");

        for (Entry entry : lruEntries.values()) {
            if (entry.currentEditor != null) {
                writer.write(DIRTY + ' ' + entry.key + '\n');
            } else {
                writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            }
        }

        writer.close();
        journalFileTmp.renameTo(journalFile);
        journalWriter = new BufferedWriter(new FileWriter(journalFile, true), IO_BUFFER_SIZE);
    }

    private static void deleteIfExists(File file) throws IOException {
//        try {
//            Libcore.os.remove(file.getPath());
//        } catch (ErrnoException errnoException) {
//            if (errnoException.errno != OsConstants.ENOENT) {
//                throw errnoException.rethrowAsIOException();
//            }
//        }
        if (file.exists() && !file.delete()) {
            throw new IOException();
        }
    }

    /**
     * Returns a snapshot of the entry named {@code key}, or null if it doesn't
     * exist is not currently readable. If a value is returned, it is moved to
     * the head of the LRU queue.
     */
    public synchronized Snapshot get(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null) {
            return null;
        }

        if (!entry.readable) {
            return null;
        }

        /*
         * Open all streams eagerly to guarantee that we see a single published
         * snapshot. If we opened streams lazily then the streams could come
         * from different edits.
         */
        InputStream[] ins = new InputStream[valueCount];
        try {
            for (int i = 0; i < valueCount; i++) {
                ins[i] = new FileInputStream(entry.getCleanFile(i));
            }
        } catch (FileNotFoundException e) {
            // a file must have been deleted manually!
            return null;
        }

        redundantOpCount++;
        journalWriter.append(READ + ' ' + key + '\n');
        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return new Snapshot(key, entry.sequenceNumber, ins);
    }

    /**
     * Returns an editor for the entry named {@code key}, or null if another
     * edit is in progress.
     */
    public Editor edit(String key) throws IOException {
        return edit(key, ANY_SEQUENCE_NUMBER);
    }

    private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER
                && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) {
            return null; // snapshot is stale
        }
        if (entry == null) {
            entry = new Entry(key);
            lruEntries.put(key, entry);
        } else if (entry.currentEditor != null) {
            return null; // another edit is in progress
        }

        Editor editor = new Editor(entry);
        entry.currentEditor = editor;

        // flush the journal before creating files to prevent file leaks
        journalWriter.write(DIRTY + ' ' + key + '\n');
        journalWriter.flush();
        return editor;
    }

    /**
     * Returns the directory where this cache stores its data.
     */
    public File getDirectory() {
        return directory;
    }

    /**
     * Returns the maximum number of bytes that this cache should use to store
     * its data.
     */
    public long maxSize() {
        return maxSize;
    }

    /**
     * Returns the number of bytes currently being used to store the values in
     * this cache. This may be greater than the max size if a background
     * deletion is pending.
     */
    public synchronized long size() {
        return size;
    }

    private synchronized void completeEdit(Editor editor, boolean success) throws IOException {
        Entry entry = editor.entry;
        if (entry.currentEditor != editor) {
            throw new IllegalStateException();
        }

        // if this edit is creating the entry for the first time, every index must have a value
        if (success && !entry.readable) {
            for (int i = 0; i < valueCount; i++) {
                if (!entry.getDirtyFile(i).exists()) {
                    editor.abort();
                    throw new IllegalStateException("edit didn't create file " + i);
                }
            }
        }

        for (int i = 0; i < valueCount; i++) {
            File dirty = entry.getDirtyFile(i);
            if (success) {
                if (dirty.exists()) {
                    // Publish the dirty file and account for the size change.
                    File clean = entry.getCleanFile(i);
                    dirty.renameTo(clean);
                    long oldLength = entry.lengths[i];
                    long newLength = clean.length();
                    entry.lengths[i] = newLength;
                    size = size - oldLength + newLength;
                }
            } else {
                deleteIfExists(dirty);
            }
        }

        redundantOpCount++;
        entry.currentEditor = null;
        // NOTE(review): non-short-circuit '|' below; both operands are cheap booleans so the
        // behavior matches '||' — presumably a typo in the original, confirm before changing.
        if (entry.readable | success) {
            entry.readable = true;
            journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n');
            if (success) {
                entry.sequenceNumber = nextSequenceNumber++;
            }
        } else {
            lruEntries.remove(entry.key);
            journalWriter.write(REMOVE + ' ' + entry.key + '\n');
        }

        if (size > maxSize || journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }
    }

    /**
     * We only rebuild the journal when it will halve the size of the journal
     * and eliminate at least 2000 ops.
     */
    private boolean journalRebuildRequired() {
        final int REDUNDANT_OP_COMPACT_THRESHOLD = 2000;
        return redundantOpCount >= REDUNDANT_OP_COMPACT_THRESHOLD
                && redundantOpCount >= lruEntries.size();
    }

    /**
     * Drops the entry for {@code key} if it exists and can be removed. Entries
     * actively being edited cannot be removed.
     *
     * @return true if an entry was removed.
     */
    public synchronized boolean remove(String key) throws IOException {
        checkNotClosed();
        validateKey(key);
        Entry entry = lruEntries.get(key);
        if (entry == null || entry.currentEditor != null) {
            return false;
        }

        for (int i = 0; i < valueCount; i++) {
            File file = entry.getCleanFile(i);
            if (!file.delete()) {
                throw new IOException("failed to delete " + file);
            }
            size -= entry.lengths[i];
            entry.lengths[i] = 0;
        }

        redundantOpCount++;
        journalWriter.append(REMOVE + ' ' + key + '\n');
        lruEntries.remove(key);

        if (journalRebuildRequired()) {
            executorService.submit(cleanupCallable);
        }

        return true;
    }

    /**
     * Returns true if this cache has been closed.
     */
    public boolean isClosed() {
        return journalWriter == null;
    }

    private void checkNotClosed() {
        if (journalWriter == null) {
            throw new IllegalStateException("cache is closed");
        }
    }

    /**
     * Force buffered operations to the filesystem.
     */
    public synchronized void flush() throws IOException {
        checkNotClosed();
        trimToSize();
        journalWriter.flush();
    }

    /**
     * Closes this cache. Stored values will remain on the filesystem.
     */
    public synchronized void close() throws IOException {
        if (journalWriter == null) {
            return; // already closed
        }
        // Abort in-flight edits over a copy, since abort() mutates lruEntries.
        for (Entry entry : new ArrayList<Entry>(lruEntries.values())) {
            if (entry.currentEditor != null) {
                entry.currentEditor.abort();
            }
        }
        trimToSize();
        journalWriter.close();
        journalWriter = null;
    }

    private void trimToSize() throws IOException {
        while (size > maxSize) {
//            Map.Entry<String, Entry> toEvict = lruEntries.eldest();
            // Access-ordered LinkedHashMap: the first entry is the least recently used.
            final Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next();
            remove(toEvict.getKey());
        }
    }

    /**
     * Closes the cache and deletes all of its stored values. This will delete
     * all files in the cache directory including files that weren't created by
     * the cache.
     */
    public void delete() throws IOException {
        close();
        deleteContents(directory);
    }

    private void validateKey(String key) {
        if (key.contains(" ") || key.contains("\n") || key.contains("\r")) {
            throw new IllegalArgumentException(
                    "keys must not contain spaces or newlines: \"" + key + "\"");
        }
    }

    private static String inputStreamToString(InputStream in) throws IOException {
        return readFully(new InputStreamReader(in, UTF_8));
    }

    /**
     * A snapshot of the values for an entry.
     */
    public final class Snapshot implements Closeable {
        private final String key;
        private final long sequenceNumber; // entry's sequence number at snapshot time
        private final InputStream[] ins;   // one already-open stream per value index

        private Snapshot(String key, long sequenceNumber, InputStream[] ins) {
            this.key = key;
            this.sequenceNumber = sequenceNumber;
            this.ins = ins;
        }

        /**
         * Returns an editor for this snapshot's entry, or null if either the
         * entry has changed since this snapshot was created or if another edit
         * is in progress.
         */
        public Editor edit() throws IOException {
            return DiskLruCache.this.edit(key, sequenceNumber);
        }

        /**
         * Returns the unbuffered stream with the value for {@code index}.
         */
        public InputStream getInputStream(int index) {
            return ins[index];
        }

        /**
         * Returns the string value for {@code index}.
         */
        public String getString(int index) throws IOException {
            return inputStreamToString(getInputStream(index));
        }

        @Override public void close() {
            for (InputStream in : ins) {
                closeQuietly(in);
            }
        }
    }

    /**
     * Edits the values for an entry.
     */
    public final class Editor {
        private final Entry entry;
        private boolean hasErrors; // set by FaultHidingOutputStream; forces abort on commit

        private Editor(Entry entry) {
            this.entry = entry;
        }

        /**
         * Returns an unbuffered input stream to read the last committed value,
         * or null if no value has been committed.
         */
        public InputStream newInputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                if (!entry.readable) {
                    return null;
                }
                return new FileInputStream(entry.getCleanFile(index));
            }
        }

        /**
         * Returns the last committed value as a string, or null if no value
         * has been committed.
         */
        public String getString(int index) throws IOException {
            InputStream in = newInputStream(index);
            return in != null ? inputStreamToString(in) : null;
        }

        /**
         * Returns a new unbuffered output stream to write the value at
         * {@code index}. If the underlying output stream encounters errors
         * when writing to the filesystem, this edit will be aborted when
         * {@link #commit} is called. The returned output stream does not throw
         * IOExceptions.
         */
        public OutputStream newOutputStream(int index) throws IOException {
            synchronized (DiskLruCache.this) {
                if (entry.currentEditor != this) {
                    throw new IllegalStateException();
                }
                return new FaultHidingOutputStream(new FileOutputStream(entry.getDirtyFile(index)));
            }
        }

        /**
         * Sets the value at {@code index} to {@code value}.
         */
        public void set(int index, String value) throws IOException {
            Writer writer = null;
            try {
                writer = new OutputStreamWriter(newOutputStream(index), UTF_8);
                writer.write(value);
            } finally {
                closeQuietly(writer);
            }
        }

        /**
         * Commits this edit so it is visible to readers. This releases the
         * edit lock so another edit may be started on the same key.
         */
        public void commit() throws IOException {
            if (hasErrors) {
                completeEdit(this, false);
                remove(entry.key); // the previous entry is stale
            } else {
                completeEdit(this, true);
            }
        }

        /**
         * Aborts this edit. This releases the edit lock so another edit may be
         * started on the same key.
*/ public void abort() throws IOException { completeEdit(this, false); } private class FaultHidingOutputStream extends FilterOutputStream { private FaultHidingOutputStream(OutputStream out) { super(out); } @Override public void write(int oneByte) { try { out.write(oneByte); } catch (IOException e) { hasErrors = true; } } @Override public void write(byte[] buffer, int offset, int length) { try { out.write(buffer, offset, length); } catch (IOException e) { hasErrors = true; } } @Override public void close() { try { out.close(); } catch (IOException e) { hasErrors = true; } } @Override public void flush() { try { out.flush(); } catch (IOException e) { hasErrors = true; } } } } private final class Entry { private final String key; /** Lengths of this entry's files. */ private final long[] lengths; /** True if this entry has ever been published */ private boolean readable; /** The ongoing edit or null if this entry is not being edited. */ private Editor currentEditor; /** The sequence number of the most recently committed edit to this entry. */ private long sequenceNumber; private Entry(String key) { this.key = key; this.lengths = new long[valueCount]; } public String getLengths() throws IOException { StringBuilder result = new StringBuilder(); for (long size : lengths) { result.append(' ').append(size); } return result.toString(); } /** * Set lengths using decimal numbers like "10123". */ private void setLengths(String[] strings) throws IOException { if (strings.length != valueCount) { throw invalidLengths(strings); } try { for (int i = 0; i < strings.length; i++) { lengths[i] = Long.parseLong(strings[i]); } } catch (NumberFormatException e) { throw invalidLengths(strings); } } private IOException invalidLengths(String[] strings) throws IOException { throw new IOException("unexpected journal line: " + Arrays.toString(strings)); } public File getCleanFile(int i) { return new File(directory, key + "." 
+ i); } public File getDirtyFile(int i) { return new File(directory, key + "." + i + ".tmp"); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.carbondata.core.reader; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.apache.carbondata.common.factory.CarbonCommonFactory; import org.apache.carbondata.core.cache.dictionary.ColumnDictionaryChunkIterator; import org.apache.carbondata.core.carbon.CarbonTableIdentifier; import org.apache.carbondata.core.carbon.ColumnIdentifier; import org.apache.carbondata.core.carbon.path.CarbonTablePath; import org.apache.carbondata.core.service.PathService; import org.apache.carbondata.format.ColumnDictionaryChunk; import org.apache.thrift.TBase; /** * This class performs the functionality of reading a carbon dictionary file. * It implements various overloaded method for read functionality. 
*/ public class CarbonDictionaryReaderImpl implements CarbonDictionaryReader { /** * carbon table identifier */ protected CarbonTableIdentifier carbonTableIdentifier; /** * carbon dictionary data store path */ protected String storePath; /** * column name */ protected ColumnIdentifier columnIdentifier; /** * dictionary file path */ protected String columnDictionaryFilePath; /** * dictionary thrift file reader */ private ThriftReader dictionaryFileReader; /** * Constructor * * @param storePath carbon dictionary data store path * @param carbonTableIdentifier table identifier which will give table name and database name * @param columnIdentifier column unique identifier */ public CarbonDictionaryReaderImpl(String storePath, CarbonTableIdentifier carbonTableIdentifier, ColumnIdentifier columnIdentifier) { this.storePath = storePath; this.carbonTableIdentifier = carbonTableIdentifier; this.columnIdentifier = columnIdentifier; initFileLocation(); } /** * This method should be used when complete dictionary data needs to be read. * Applicable scenarios : * 1. Global dictionary generation in case of incremental load * 2. Reading dictionary file on first time query * 3. Loading a dictionary column in memory based on query requirement. * This is a case where carbon column cache feature is enabled in which a * column dictionary is read if it is present in the query. * * @return list of byte array. Each byte array is unique dictionary value * @throws IOException if an I/O error occurs */ @Override public List<byte[]> read() throws IOException { return read(0L); } /** * This method should be used when data has to be read from a given offset. * Applicable scenarios : * 1. Incremental data load. If column dictionary is already loaded in memory * and incremental load is done, then for the new query only new dictionary data * has to be read form memory. * * @param startOffset start offset of dictionary file * @return list of byte array. 
Each byte array is unique dictionary value * @throws IOException if an I/O error occurs */ @Override public List<byte[]> read(long startOffset) throws IOException { List<CarbonDictionaryColumnMetaChunk> carbonDictionaryColumnMetaChunks = readDictionaryMetadataFile(); // get the last entry for carbon dictionary meta chunk CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk = carbonDictionaryColumnMetaChunks.get(carbonDictionaryColumnMetaChunks.size() - 1); // end offset till where the dictionary file has to be read long endOffset = carbonDictionaryColumnMetaChunk.getEnd_offset(); List<ColumnDictionaryChunk> columnDictionaryChunks = read(carbonDictionaryColumnMetaChunks, startOffset, endOffset); return getDictionaryList(columnDictionaryChunks); } /** * This method will be used to read data between given start and end offset. * Applicable scenarios: * 1. Truncate operation. If there is any inconsistency while writing the dictionary file * then we can give the start and end offset till where the data has to be retained. * * @param startOffset start offset of dictionary file * @param endOffset end offset of dictionary file * @return iterator over byte array. Each byte array is unique dictionary value * @throws IOException if an I/O error occurs */ @Override public Iterator<byte[]> read(long startOffset, long endOffset) throws IOException { List<CarbonDictionaryColumnMetaChunk> carbonDictionaryColumnMetaChunks = readDictionaryMetadataFile(); List<ColumnDictionaryChunk> columnDictionaryChunks = read(carbonDictionaryColumnMetaChunks, startOffset, endOffset); Iterator<byte[]> columnDictionaryChunkWrapper = new ColumnDictionaryChunkIterator(columnDictionaryChunks); return columnDictionaryChunkWrapper; } /** * Closes this stream and releases any system resources associated * with it. If the stream is already closed then invoking this * method has no effect. 
* * @throws IOException if an I/O error occurs */ @Override public void close() throws IOException { if (null != dictionaryFileReader) { dictionaryFileReader.close(); dictionaryFileReader = null; } } /** * @param carbonDictionaryColumnMetaChunks dictionary meta chunk list * @param startOffset start offset for dictionary data file * @param endOffset end offset till where data has * to be read from dictionary data file * @return list of byte column dictionary values * @throws IOException readDictionary file method throws IO exception */ private List<ColumnDictionaryChunk> read( List<CarbonDictionaryColumnMetaChunk> carbonDictionaryColumnMetaChunks, long startOffset, long endOffset) throws IOException { // calculate the number of chunks to be read from dictionary file from start offset int dictionaryChunkCountsToBeRead = calculateTotalDictionaryChunkCountsToBeRead(carbonDictionaryColumnMetaChunks, startOffset, endOffset); // open dictionary file thrift reader openThriftReader(); // read the required number of chunks from dictionary file List<ColumnDictionaryChunk> columnDictionaryChunks = readDictionaryFile(startOffset, dictionaryChunkCountsToBeRead); return columnDictionaryChunks; } /** * This method will put all the dictionary chunks into one list and return that list * * @param columnDictionaryChunks * @return */ private List<byte[]> getDictionaryList(List<ColumnDictionaryChunk> columnDictionaryChunks) { int dictionaryListSize = 0; for (ColumnDictionaryChunk dictionaryChunk : columnDictionaryChunks) { dictionaryListSize = dictionaryListSize + dictionaryChunk.getValues().size(); } // convert byte buffer list to byte array list of dictionary values List<byte[]> dictionaryValues = new ArrayList<byte[]>(dictionaryListSize); for (ColumnDictionaryChunk dictionaryChunk : columnDictionaryChunks) { convertAndFillByteBufferListToByteArrayList(dictionaryValues, dictionaryChunk.getValues()); } return dictionaryValues; } /** * This method will convert and fill list of byte 
buffer to list of byte array * * @param dictionaryValues list of byte array. Each byte array is * unique dictionary value * @param dictionaryValueBufferList dictionary thrift object which is a list of byte buffer. * Each dictionary value is a wrapped in byte buffer before * writing to file */ private void convertAndFillByteBufferListToByteArrayList(List<byte[]> dictionaryValues, List<ByteBuffer> dictionaryValueBufferList) { for (ByteBuffer buffer : dictionaryValueBufferList) { int length = buffer.limit(); byte[] value = new byte[length]; buffer.get(value, 0, value.length); dictionaryValues.add(value); } } /** * This method will form the path for dictionary file for a given column */ protected void initFileLocation() { PathService pathService = CarbonCommonFactory.getPathService(); CarbonTablePath carbonTablePath = pathService.getCarbonTablePath( this.storePath, carbonTableIdentifier); this.columnDictionaryFilePath = carbonTablePath .getDictionaryFilePath(columnIdentifier.getColumnId()); } /** * This method will read the dictionary file and return the list of dictionary thrift object * * @param dictionaryStartOffset start offset for dictionary file * @param dictionaryChunkCountToBeRead number of dictionary chunks to be read * @return list of dictionary chunks * @throws IOException setReadOffset method throws I/O exception */ private List<ColumnDictionaryChunk> readDictionaryFile(long dictionaryStartOffset, int dictionaryChunkCountToBeRead) throws IOException { List<ColumnDictionaryChunk> dictionaryChunks = new ArrayList<ColumnDictionaryChunk>(dictionaryChunkCountToBeRead); // skip the number of bytes if a start offset is given dictionaryFileReader.setReadOffset(dictionaryStartOffset); // read till dictionary chunk count while (dictionaryFileReader.hasNext() && dictionaryChunks.size() != dictionaryChunkCountToBeRead) { dictionaryChunks.add((ColumnDictionaryChunk) dictionaryFileReader.read()); } return dictionaryChunks; } /** * This method will read the dictionary 
metadata file for a given column * and calculate the number of chunks to be read from the dictionary file. * It will do a strict validation for start and end offset as if the offsets are not * exactly matching, because data is written in thrift format, the thrift object * will not be retrieved properly * * @param dictionaryChunkMetaList list of dictionary chunk metadata * @param dictionaryChunkStartOffset start offset for a dictionary chunk * @param dictionaryChunkEndOffset end offset for a dictionary chunk * @return */ private int calculateTotalDictionaryChunkCountsToBeRead( List<CarbonDictionaryColumnMetaChunk> dictionaryChunkMetaList, long dictionaryChunkStartOffset, long dictionaryChunkEndOffset) { boolean chunkWithStartOffsetFound = false; int dictionaryChunkCount = 0; for (CarbonDictionaryColumnMetaChunk metaChunk : dictionaryChunkMetaList) { // find the column meta chunk whose start offset value matches // with the given dictionary start offset if (!chunkWithStartOffsetFound && dictionaryChunkStartOffset == metaChunk.getStart_offset()) { chunkWithStartOffsetFound = true; } // start offset is found then keep adding the chunk count to be read if (chunkWithStartOffsetFound) { dictionaryChunkCount = dictionaryChunkCount + metaChunk.getChunk_count(); } // when end offset is reached then break the loop if (dictionaryChunkEndOffset == metaChunk.getEnd_offset()) { break; } } return dictionaryChunkCount; } /** * This method will read dictionary metadata file and return the dictionary meta chunks * * @return list of dictionary metadata chunks * @throws IOException read and close method throws IO exception */ private List<CarbonDictionaryColumnMetaChunk> readDictionaryMetadataFile() throws IOException { CarbonDictionaryMetadataReader columnMetadataReaderImpl = getDictionaryMetadataReader(); List<CarbonDictionaryColumnMetaChunk> dictionaryMetaChunkList = null; // read metadata file try { dictionaryMetaChunkList = columnMetadataReaderImpl.read(); } finally { // close the 
metadata reader columnMetadataReaderImpl.close(); } return dictionaryMetaChunkList; } /** * @return */ protected CarbonDictionaryMetadataReader getDictionaryMetadataReader() { return new CarbonDictionaryMetadataReaderImpl(this.storePath, carbonTableIdentifier, this.columnIdentifier); } /** * This method will open the dictionary file stream for reading * * @throws IOException thrift reader open method throws IOException */ private void openThriftReader() throws IOException { if (null == dictionaryFileReader) { // initialise dictionary file reader which will return dictionary thrift object // dictionary thrift object contains a list of byte buffer dictionaryFileReader = new ThriftReader(this.columnDictionaryFilePath, new ThriftReader.TBaseCreator() { @Override public TBase create() { return new ColumnDictionaryChunk(); } }); // Open dictionary file reader dictionaryFileReader.open(); } } }
/** * Logback: the reliable, generic, fast and flexible logging framework. * Copyright (C) 1999-2015, QOS.ch. All rights reserved. * * This program and the accompanying materials are dual-licensed under * either the terms of the Eclipse Public License v1.0 as published by * the Eclipse Foundation * * or (per the licensee's choosing) * * under the terms of the GNU Lesser General Public License version 2.1 * as published by the Free Software Foundation. */ package ch.qos.logback.classic.selector; import static ch.qos.logback.classic.ClassicConstants.JNDI_CONFIGURATION_RESOURCE; import static ch.qos.logback.classic.ClassicConstants.JNDI_CONTEXT_NAME; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.naming.Context; import javax.naming.NamingException; import ch.qos.logback.classic.LoggerContext; import ch.qos.logback.classic.joran.JoranConfigurator; import ch.qos.logback.classic.util.ContextInitializer; import ch.qos.logback.classic.util.JNDIUtil; import ch.qos.logback.core.joran.spi.JoranException; import ch.qos.logback.core.status.InfoStatus; import ch.qos.logback.core.status.StatusManager; import ch.qos.logback.core.status.StatusUtil; import ch.qos.logback.core.status.WarnStatus; import ch.qos.logback.core.util.Loader; import ch.qos.logback.core.util.StatusPrinter; /** * A class that allows the LoggerFactory to access an environment-based * LoggerContext. 
 * <p/>
 * To add in catalina.sh
 * <p/>
 * JAVA_OPTS="$JAVA_OPTS "-Dlogback.ContextSelector=JNDI""
 *
 * @author Ceki G&uuml;lc&uuml;
 * @author S&eacute;bastien Pennec
 */
public class ContextJNDISelector implements ContextSelector {

  /** Known contexts keyed by JNDI context name; wrapped by Collections.synchronizedMap. */
  private final Map<String, LoggerContext> synchronizedContextMap;

  /** Context returned whenever no JNDI name can be resolved. */
  private final LoggerContext defaultContext;

  /** Per-thread override installed via setLocalContext(); consulted before any JNDI lookup. */
  private static final ThreadLocal<LoggerContext> threadLocal = new ThreadLocal<LoggerContext>();

  public ContextJNDISelector(LoggerContext context) {
    synchronizedContextMap = Collections
        .synchronizedMap(new HashMap<String, LoggerContext>());
    defaultContext = context;
  }

  public LoggerContext getDefaultLoggerContext() {
    return defaultContext;
  }

  public LoggerContext detachLoggerContext(String loggerContextName) {
    return synchronizedContextMap.remove(loggerContextName);
  }

  /**
   * Returns the LoggerContext for the caller: the thread-local override if set,
   * otherwise the context named by the JNDI entry, creating and configuring it
   * on first use; falls back to the default context when JNDI yields no name.
   */
  public LoggerContext getLoggerContext() {
    String contextName = null;
    Context ctx = null;

    // First check if ThreadLocal has been set already
    LoggerContext lc = threadLocal.get();
    if (lc != null) {
      return lc;
    }

    try {
      // We first try to find the name of our
      // environment's LoggerContext
      ctx = JNDIUtil.getInitialContext();
      contextName = (String) JNDIUtil.lookup(ctx, JNDI_CONTEXT_NAME);
    } catch (NamingException ne) {
      // We can't log here
    }

    if (contextName == null) {
      // We return the default context
      return defaultContext;
    } else {
      // Let's see if we already know such a context
      LoggerContext loggerContext = synchronizedContextMap.get(contextName);

      if (loggerContext == null) {
        // We have to create a new LoggerContext.
        // NOTE(review): this get-then-put is not atomic — two threads racing on
        // the same name may each build a context, with the later put winning;
        // confirm whether that is acceptable before relying on uniqueness.
        loggerContext = new LoggerContext();
        loggerContext.setName(contextName);
        synchronizedContextMap.put(contextName, loggerContext);
        URL url = findConfigFileURL(ctx, loggerContext);
        if (url != null) {
          configureLoggerContextByURL(loggerContext, url);
        } else {
          try {
            new ContextInitializer(loggerContext).autoConfig();
          } catch (JoranException je) {
            // NOTE(review): configuration failure is silently swallowed here;
            // errors surface only via the status printing below.
          }
        }
        // logback-292
        if (!StatusUtil.contextHasStatusListener(loggerContext))
          StatusPrinter.printInCaseOfErrorsOrWarnings(loggerContext);
      }
      return loggerContext;
    }
  }

  // Convention-based config resource name for a named context.
  private String conventionalConfigFileName(String contextName) {
    return "logback-" + contextName + ".xml";
  }

  /**
   * Locates the configuration file for the given context: a dedicated JNDI-named
   * resource if present, otherwise the conventional "logback-<name>.xml".
   * Returns null when neither resolves to a classpath resource.
   */
  private URL findConfigFileURL(Context ctx, LoggerContext loggerContext) {
    StatusManager sm = loggerContext.getStatusManager();

    String jndiEntryForConfigResource = JNDIUtil.lookup(ctx, JNDI_CONFIGURATION_RESOURCE);
    // Do we have a dedicated configuration file?
    if (jndiEntryForConfigResource != null) {
      sm.add(new InfoStatus("Searching for [" + jndiEntryForConfigResource + "]", this));
      URL url = urlByResourceName(sm, jndiEntryForConfigResource);
      if (url == null) {
        String msg = "The jndi resource [" + jndiEntryForConfigResource
            + "] for context [" + loggerContext.getName()
            + "] does not lead to a valid file";
        sm.add(new WarnStatus(msg, this));
      }
      return url;
    } else {
      String resourceByConvention = conventionalConfigFileName(loggerContext.getName());
      return urlByResourceName(sm, resourceByConvention);
    }
  }

  // Resolves a resource name via the thread context class loader first, then
  // this class's own class loader.
  private URL urlByResourceName(StatusManager sm, String resourceName) {
    sm.add(new InfoStatus("Searching for [" + resourceName + "]", this));
    URL url = Loader.getResource(resourceName, Loader.getTCL());
    if (url != null) {
      return url;
    }
    return Loader.getResourceBySelfClassLoader(resourceName);
  }

  // Resets the context and applies the configuration found at url; Joran errors
  // are reported through the context's status system rather than thrown.
  private void configureLoggerContextByURL(LoggerContext context, URL url) {
    try {
      JoranConfigurator configurator = new JoranConfigurator();
      context.reset();
      configurator.setContext(context);
      configurator.doConfigure(url);
    } catch (JoranException e) {
      // Errors are recorded in the context's status manager and printed below.
    }
    StatusPrinter.printInCaseOfErrorsOrWarnings(context);
  }

  public List<String> getContextNames() {
    List<String> list = new ArrayList<String>();
    list.addAll(synchronizedContextMap.keySet());
    return list;
  }

  public LoggerContext getLoggerContext(String name) {
    return synchronizedContextMap.get(name);
  }

  /**
   * Returns the number of managed contexts. Used for testing purposes.
   *
   * @return the number of managed contexts
   */
  public int getCount() {
    return synchronizedContextMap.size();
  }

  /**
   * These methods are used by the LoggerContextFilter.
   * <p/>
   * They provide a way to tell the selector which context to use, thus saving
   * the cost of a JNDI call at each new request.
   *
   * @param context
   */
  public void setLocalContext(LoggerContext context) {
    threadLocal.set(context);
  }

  public void removeLocalContext() {
    threadLocal.remove();
  }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.execution; import com.intellij.codeInsight.daemon.impl.analysis.JavaModuleGraphUtil; import com.intellij.debugger.impl.GenericDebuggerRunnerSettings; import com.intellij.diagnostic.logging.OutputFileUtil; import com.intellij.execution.configurations.*; import com.intellij.execution.filters.ArgumentFileFilter; import com.intellij.execution.impl.ConsoleBuffer; import com.intellij.execution.process.*; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.runners.ProgramRunner; import com.intellij.execution.target.TargetEnvironmentConfiguration; import com.intellij.execution.target.TargetEnvironmentRequest; import com.intellij.execution.target.TargetedCommandLineBuilder; import com.intellij.execution.testDiscovery.JavaAutoRunManager; import com.intellij.execution.testframework.*; import com.intellij.execution.testframework.actions.AbstractRerunFailedTestsAction; import com.intellij.execution.testframework.autotest.AbstractAutoTestManager; import com.intellij.execution.testframework.autotest.ToggleAutoTestAction; import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil; import com.intellij.execution.testframework.sm.runner.SMRunnerConsolePropertiesProvider; import com.intellij.execution.testframework.sm.runner.SMTRunnerConsoleProperties; import com.intellij.execution.testframework.sm.runner.ui.SMTRunnerConsoleView; import com.intellij.execution.testframework.sm.runner.ui.SMTestRunnerResultsForm; import com.intellij.execution.testframework.ui.BaseTestsOutputConsoleView; import com.intellij.execution.util.JavaParametersUtil; import com.intellij.execution.util.ProgramParametersConfigurator; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.extensions.Extensions; import 
com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JavaSdkType; import com.intellij.openapi.projectRoots.JavaSdkVersion; import com.intellij.openapi.projectRoots.JdkUtil; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.projectRoots.ex.JavaSdkUtil; import com.intellij.openapi.roots.CompilerModuleExtension; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.roots.ProjectFileIndex; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.openapi.vfs.JarFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.JavaPsiFacade; import com.intellij.psi.PsiDirectory; import com.intellij.psi.PsiJavaModule; import com.intellij.psi.PsiPackage; import com.intellij.psi.impl.PsiImplUtil; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.search.GlobalSearchScopesCore; import com.intellij.util.PathUtil; import com.intellij.util.PathsList; import com.intellij.util.ui.UIUtil; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.jps.model.serialization.PathMacroUtil; import java.io.*; import java.net.InetAddress; import java.net.ServerSocket; import java.util.*; public abstract class JavaTestFrameworkRunnableState<T extends ModuleBasedConfiguration<JavaRunConfigurationModule, Element> & CommonJavaRunConfigurationParameters & ConfigurationWithCommandLineShortener & SMRunnerConsolePropertiesProvider> extends JavaCommandLineState implements RemoteConnectionCreator { private static final Logger LOG = 
    Logger.getInstance(JavaTestFrameworkRunnableState.class);

  // Extension point through which plugins patch JUnit-style java parameters.
  private static final ExtensionPointName<JUnitPatcher> JUNIT_PATCHER_EP =
    new ExtensionPointName<>("com.intellij.junitPatcher");

  private static final String JIGSAW_OPTIONS = "Jigsaw Options";

  // Returns the named VM-parameter group that holds module-system (Jigsaw) options, if any.
  public static ParamsGroup getJigsawOptions(JavaParameters parameters) {
    return parameters.getVMParametersList().getParamsGroup(JIGSAW_OPTIONS);
  }

  protected ServerSocket myServerSocket;
  protected File myTempFile;
  protected File myWorkingDirsFile = null;

  // Optional override for how the debugger connection is created; null means default.
  private RemoteConnectionCreator remoteConnectionCreator;
  private final List<ArgumentFileFilter> myArgumentFileFilters = new ArrayList<>();

  public void setRemoteConnectionCreator(RemoteConnectionCreator remoteConnectionCreator) {
    this.remoteConnectionCreator = remoteConnectionCreator;
  }

  @Nullable
  @Override
  public RemoteConnection createRemoteConnection(ExecutionEnvironment environment) {
    // Delegates to the pluggable creator when one has been installed.
    return remoteConnectionCreator == null ? null : remoteConnectionCreator.createRemoteConnection(environment);
  }

  @Override
  public boolean isPollConnection() {
    return remoteConnectionCreator != null && remoteConnectionCreator.isPollConnection();
  }

  public JavaTestFrameworkRunnableState(ExecutionEnvironment environment) {
    super(environment);
  }

  /** Human-readable framework name, e.g. shown in the test console. */
  @NotNull protected abstract String getFrameworkName();

  @NotNull protected abstract String getFrameworkId();

  /** Passes the path of the temp file carrying test data to the forked VM's parameter list. */
  protected abstract void passTempFile(ParametersList parametersList, String tempFilePath);

  @NotNull protected abstract T getConfiguration();

  @Nullable protected abstract TestSearchScope getScope();

  @NotNull protected abstract String getForkMode();

  /**
   * Builds the process handler for the test run: applies fork/repeat settings,
   * launches the command line, and attaches the optional test-search task.
   */
  @NotNull
  protected OSProcessHandler createHandler(Executor executor) throws ExecutionException {
    appendForkInfo(executor);
    appendRepeatMode();
    OSProcessHandler processHandler = new KillableColoredProcessHandler.Silent(createCommandLine());
    ProcessTerminatedListener.attach(processHandler);
    final SearchForTestsTask searchForTestsTask = createSearchingForTestsTask();
    if (searchForTestsTask != null) {
      searchForTestsTask.attachTaskToProcess(processHandler);
    }
    return processHandler;
  }

  // Subclasses may return a task that locates tests before/while the process runs.
  public SearchForTestsTask createSearchingForTestsTask() throws ExecutionException {
    return null;
  }

  protected boolean configureByModule(Module module) {
    return module != null;
  }

  protected boolean isIdBasedTestTree() {
    return false;
  }

  @NotNull
  @Override
  protected TargetedCommandLineBuilder createTargetedCommandLine(@NotNull TargetEnvironmentRequest request,
                                                                 @Nullable TargetEnvironmentConfiguration configuration)
    throws ExecutionException {
    TargetedCommandLineBuilder commandLineBuilder = super.createTargetedCommandLine(request, configuration);
    // Redirect stdin from the configured input file, if the run configuration asks for one.
    File inputFile = InputRedirectAware.getInputFile(getConfiguration());
    if (inputFile != null) {
      commandLineBuilder.setInputFile(request.createUpload(inputFile.getAbsolutePath()));
    }
    // Register console filters that expand @argfile references in process output.
    Map<String, String> content = commandLineBuilder.getUserData(JdkUtil.COMMAND_LINE_CONTENT);
    if (content != null) {
      content.forEach((key, value) -> myArgumentFileFilters.add(new ArgumentFileFilter(key, value)));
    }
    return commandLineBuilder;
  }

  /**
   * Runs the configuration: builds the SM test console, starts the process,
   * wires console/output-file/rerun-failed/auto-test actions, and returns the
   * combined execution result.
   */
  @NotNull
  @Override
  public ExecutionResult execute(@NotNull Executor executor, @NotNull ProgramRunner<?> runner) throws ExecutionException {
    final RunnerSettings runnerSettings = getRunnerSettings();

    final SMTRunnerConsoleProperties testConsoleProperties = getConfiguration().createTestConsoleProperties(executor);
    testConsoleProperties.setIdBasedTestTree(isIdBasedTestTree());
    testConsoleProperties.setIfUndefined(TestConsoleProperties.HIDE_PASSED_TESTS, false);

    final BaseTestsOutputConsoleView consoleView = SMTestRunnerConnectionUtil.createConsole(getFrameworkName(), testConsoleProperties);
    final SMTestRunnerResultsForm viewer = ((SMTRunnerConsoleView)consoleView).getResultsViewer();
    Disposer.register(getConfiguration().getProject(), consoleView);

    // Note: createHandler must run before the argument-file filters are attached,
    // since createTargetedCommandLine (invoked underneath) populates them.
    final OSProcessHandler handler = createHandler(executor);

    for (ArgumentFileFilter filter : myArgumentFileFilters) {
      consoleView.addMessageFilter(filter);
    }

    consoleView.attachToProcess(handler);
    final AbstractTestProxy root = viewer.getRoot();
    if (root instanceof TestProxyRoot) {
      ((TestProxyRoot)root).setHandler(handler);
    }
    handler.addProcessListener(new ProcessAdapter() {
      @Override public void startNotified(@NotNull ProcessEvent event) {
        if (getConfiguration().isSaveOutputToFile()) {
          final File file = OutputFileUtil.getOutputFile(getConfiguration());
          root.setOutputFilePath(file != null ? file.getAbsolutePath() : null);
        }
      }

      @Override public void processTerminated(@NotNull ProcessEvent event) {
        // Flush output and clean temp files on the EDT, then detach this listener.
        Runnable runnable = () -> {
          root.flushOutputFile();
          deleteTempFiles();
          clear();
        };
        UIUtil.invokeLaterIfNeeded(runnable);
        handler.removeProcessListener(this);
      }
    });

    AbstractRerunFailedTestsAction rerunFailedTestsAction = testConsoleProperties.createRerunFailedTestsAction(consoleView);
    LOG.assertTrue(rerunFailedTestsAction != null);
    rerunFailedTestsAction.setModelProvider(() -> viewer);

    final DefaultExecutionResult result = new DefaultExecutionResult(consoleView, handler);
    result.setRestartActions(rerunFailedTestsAction, new ToggleAutoTestAction() {
      @Override public boolean isDelayApplicable() {
        return false;
      }

      @Override public AbstractAutoTestManager getAutoTestManager(Project project) {
        return JavaAutoRunManager.getInstance(project);
      }
    });

    JavaRunConfigurationExtensionManager.getInstance().attachExtensionsToProcess(getConfiguration(), handler, runnerSettings);
    return result;
  }

  protected abstract void configureRTClasspath(JavaParameters javaParameters, Module module) throws CantRunException;

  // Module SDK when a module is configured, otherwise the project SDK.
  protected Sdk getJdk() {
    Project project = getConfiguration().getProject();
    final Module module = getConfiguration().getConfigurationModule().getModule();
    return module == null ? ProjectRootManager.getInstance(project).getProjectSdk() : ModuleRootManager.getInstance(module).getSdk();
  }

  /**
   * Assembles the JVM parameters for the test process: JDK, classpath,
   * patcher/extension hooks, deferred program parameters, cyclic-buffer size,
   * and command-line shortening.
   */
  @Override
  protected JavaParameters createJavaParameters() throws ExecutionException {
    final JavaParameters javaParameters = new JavaParameters();
    Project project = getConfiguration().getProject();
    final Module module = getConfiguration().getConfigurationModule().getModule();

    javaParameters.setJdk(getJdk());

    // Program parameters are temporarily nulled so configureConfiguration does not
    // consume them; they are re-applied below through getNamedParams instead.
    final String parameters = getConfiguration().getProgramParameters();
    getConfiguration().setProgramParameters(null);
    try {
      JavaParametersUtil.configureConfiguration(javaParameters, getConfiguration());
    }
    finally {
      getConfiguration().setProgramParameters(parameters);
    }

    configureClasspath(javaParameters);
    javaParameters.getClassPath().addFirst(JavaSdkUtil.getIdeaRtJarPath());

    for (JUnitPatcher patcher : JUNIT_PATCHER_EP.getExtensionList()) {
      patcher.patchJavaParameters(project, module, javaParameters);
    }

    for (RunConfigurationExtension ext : RunConfigurationExtension.EP_NAME.getExtensionList()) {
      ext.updateJavaParameters(getConfiguration(), javaParameters, getRunnerSettings(), getEnvironment().getExecutor());
    }

    if (!StringUtil.isEmptyOrSpaces(parameters)) {
      javaParameters.getProgramParametersList().addAll(getNamedParams(parameters));
    }

    if (ConsoleBuffer.useCycleBuffer()) {
      javaParameters.getVMParametersList().addProperty("idea.test.cyclic.buffer.size", String.valueOf(ConsoleBuffer.getCycleBufferSize()));
    }

    javaParameters.setShortenCommandLine(getConfiguration().getShortenCommandLine(), project);

    return javaParameters;
  }

  protected List<String> getNamedParams(String parameters) {
    return Collections.singletonList("@name" + parameters);
  }

  private ServerSocket myForkSocket = null;

  /**
   * Lazily opens a loopback server socket used to coordinate with forked test
   * VMs; only created when fork mode or per-module forking is active under a
   * runner. Returns null otherwise.
   */
  @Nullable
  public ServerSocket getForkSocket() {
    if (myForkSocket == null &&
        (!Comparing.strEqual(getForkMode(), "none") || forkPerModule()) &&
        getRunnerSettings() != null) {
      try {
        // Port 0: let the OS pick a free port on the loopback interface.
        myForkSocket = new ServerSocket(0, 0, InetAddress.getByName("127.0.0.1"));
      }
      catch (IOException e) {
        LOG.error(e);
      }
    }
    return myForkSocket;
  }

  // Forked mode supports only the debugger runner's settings; anything else is rejected.
  private boolean isExecutorDisabledInForkedMode() {
    final RunnerSettings settings = getRunnerSettings();
    return settings != null && !(settings instanceof GenericDebuggerRunnerSettings);
  }

  /**
   * Validates the fork configuration against the executor and begins writing the
   * forked-VM command line to a temp file.
   */
  public void appendForkInfo(Executor executor) throws ExecutionException {
    final String forkMode = getForkMode();
    if (Comparing.strEqual(forkMode, "none")) {
      if (forkPerModule()) {
        if (isExecutorDisabledInForkedMode()) {
          final String actionName = executor.getActionName();
          throw new CantRunException("'" + actionName + "' is disabled when per-module working directory is configured.<br/>" +
                                     "Please specify single working directory, or change test scope to single module.");
        }
      } else {
        // No forking requested and no per-module directories: nothing to do.
        return;
      }
    }
    else if (isExecutorDisabledInForkedMode()) {
      final String actionName = executor.getActionName();
      throw new CantRunException(actionName + " is disabled in fork mode.<br/>Please change fork mode to &lt;none&gt; to " + StringUtil.toLowerCase(actionName) + ".");
    }

    final JavaParameters javaParameters = getJavaParameters();
    final Sdk jdk = javaParameters.getJdk();
    if (jdk == null) {
      throw new ExecutionException(ExecutionBundle.message("run.configuration.error.no.jdk.specified"));
    }

    try {
      final File tempFile = FileUtil.createTempFile("command.line", "", true);
      try (PrintWriter writer = new PrintWriter(tempFile, CharsetToolkit.UTF8)) {
        ShortenCommandLine shortenCommandLine = getConfiguration().getShortenCommandLine();
        boolean useDynamicClasspathForForkMode = shortenCommandLine == null ?
JdkUtil.useDynamicClasspath(getConfiguration().getProject()) : shortenCommandLine != ShortenCommandLine.NONE; if (useDynamicClasspathForForkMode && forkPerModule()) { writer.println("use classpath jar"); } else { writer.println(""); } writer.println(((JavaSdkType)jdk.getSdkType()).getVMExecutablePath(jdk)); for (String vmParameter : javaParameters.getVMParametersList().getList()) { writer.println(vmParameter); } } passForkMode(getForkMode(), tempFile, javaParameters); } catch (IOException e) { LOG.error(e); } } protected abstract void passForkMode(String forkMode, File tempFile, JavaParameters parameters) throws ExecutionException; protected void collectListeners(JavaParameters javaParameters, StringBuilder buf, String epName, String delimiter) { final T configuration = getConfiguration(); for (final Object listener : Extensions.getRootArea().getExtensionPoint(epName).getExtensionList()) { boolean enabled = true; for (RunConfigurationExtension ext : RunConfigurationExtension.EP_NAME.getExtensionList()) { if (ext.isListenerDisabled(configuration, listener, getRunnerSettings())) { enabled = false; break; } } if (enabled) { if (buf.length() > 0) buf.append(delimiter); final Class<?> classListener = listener.getClass(); buf.append(classListener.getName()); javaParameters.getClassPath().add(PathUtil.getJarPathForClass(classListener)); } } } protected void configureClasspath(final JavaParameters javaParameters) throws CantRunException { RunConfigurationModule configurationModule = getConfiguration().getConfigurationModule(); final String jreHome = getConfiguration().isAlternativeJrePathEnabled() ? 
getConfiguration().getAlternativeJrePath() : null; final int pathType = JavaParameters.JDK_AND_CLASSES_AND_TESTS; Module module = configurationModule.getModule(); if (configureByModule(module)) { JavaParametersUtil.configureModule(configurationModule, javaParameters, pathType, jreHome); LOG.assertTrue(module != null); if (JavaSdkUtil.isJdkAtLeast(javaParameters.getJdk(), JavaSdkVersion.JDK_1_9)) { configureModulePath(javaParameters, module); } } else { JavaParametersUtil.configureProject(getConfiguration().getProject(), javaParameters, pathType, jreHome); } configureRTClasspath(javaParameters, module); } protected static PsiJavaModule findJavaModule(Module module, boolean inTests) { return DumbService.getInstance(module.getProject()) .computeWithAlternativeResolveEnabled(() -> JavaModuleGraphUtil.findDescriptorByModule(module, inTests)); } private void configureModulePath(JavaParameters javaParameters, @NotNull Module module) { if (!useModulePath()) return; PsiJavaModule testModule = findJavaModule(module, true); if (testModule != null) { //adding the test module explicitly as it is unreachable from `idea.rt` ParametersList vmParametersList = javaParameters .getVMParametersList() .addParamsGroup(JIGSAW_OPTIONS) .getParametersList(); vmParametersList.add("--add-modules"); vmParametersList.add(testModule.getName()); //setup module path PathsList classPath = javaParameters.getClassPath(); PathsList modulePath = javaParameters.getModulePath(); modulePath.addAll(classPath.getPathList()); classPath.clear(); } else { PsiJavaModule prodModule = findJavaModule(module, false); if (prodModule != null) { splitDepsBetweenModuleAndClasspath(javaParameters, module, prodModule); } } } /** * Put dependencies reachable from module-info located in production sources on the module path * leave all other dependencies on the class path as is */ private void splitDepsBetweenModuleAndClasspath(JavaParameters javaParameters, Module module, PsiJavaModule prodModule) { 
CompilerModuleExtension compilerExt = CompilerModuleExtension.getInstance(module); if (compilerExt == null) return; PathsList modulePath = javaParameters.getModulePath(); PathsList classPath = javaParameters.getClassPath(); putDependenciesOnModulePath(modulePath, classPath, prodModule); ParametersList vmParametersList = javaParameters.getVMParametersList() .addParamsGroup(JIGSAW_OPTIONS) .getParametersList(); String prodModuleName = prodModule.getName(); //ensure test output is merged to the production module VirtualFile testOutput = compilerExt.getCompilerOutputPathForTests(); if (testOutput != null) { vmParametersList.add("--patch-module"); vmParametersList.add(prodModuleName + "=" + testOutput.getPath()); } //ensure test dependencies missing from production module descriptor are available in tests //todo enumerate all test dependencies explicitly vmParametersList.add("--add-reads"); vmParametersList.add(prodModuleName + "=ALL-UNNAMED"); //open packages with tests to test runner List<String> opensOptions = new ArrayList<>(); collectPackagesToOpen(opensOptions); for (String option : opensOptions) { if (option.isEmpty()) continue; vmParametersList.add("--add-opens"); vmParametersList.add(prodModuleName + "/" + option + "=ALL-UNNAMED"); } //ensure production module is explicitly added as test starter in `idea-rt` doesn't depend on it vmParametersList.add("--add-modules"); vmParametersList.add(prodModuleName); } protected void collectPackagesToOpen(List<String> options) { } /** * called on EDT */ protected static void collectSubPackages(List<String> options, PsiPackage aPackage, GlobalSearchScope globalSearchScope) { if (aPackage.getClasses(globalSearchScope).length > 0) { options.add(aPackage.getQualifiedName()); } PsiPackage[] subPackages = aPackage.getSubPackages(globalSearchScope); for (PsiPackage subPackage : subPackages) { collectSubPackages(options, subPackage, globalSearchScope); } } protected static void putDependenciesOnModulePath(PathsList modulePath, 
PathsList classPath, PsiJavaModule prodModule) { Set<PsiJavaModule> allRequires = JavaModuleGraphUtil.getAllDependencies(prodModule); allRequires.add(prodModule); //put production output on the module path as well JarFileSystem jarFS = JarFileSystem.getInstance(); ProjectFileIndex fileIndex = ProjectFileIndex.getInstance(prodModule.getProject()); allRequires.stream() .filter(javaModule -> !PsiJavaModule.JAVA_BASE.equals(javaModule.getName())) .map(javaModule -> getClasspathEntry(javaModule, fileIndex, jarFS)) .filter(Objects::nonNull) .forEach(file -> putOnModulePath(modulePath, classPath, file)); } private static void putOnModulePath(PathsList modulePath, PathsList classPath, VirtualFile virtualFile) { String path = PathUtil.getLocalPath(virtualFile.getPath()); if (classPath.getPathList().contains(path)) { classPath.remove(path); modulePath.add(path); } } private static VirtualFile getClasspathEntry(PsiJavaModule javaModule, ProjectFileIndex fileIndex, JarFileSystem jarFileSystem) { VirtualFile moduleFile = PsiImplUtil.getModuleVirtualFile(javaModule); Module moduleDependency = fileIndex.getModuleForFile(moduleFile); if (moduleDependency == null) { return jarFileSystem.getLocalVirtualFileFor(moduleFile); } CompilerModuleExtension moduleExtension = CompilerModuleExtension.getInstance(moduleDependency); return moduleExtension != null ? 
moduleExtension.getCompilerOutputPath() : null; } protected void createServerSocket(JavaParameters javaParameters) { try { myServerSocket = new ServerSocket(0, 0, InetAddress.getByName("127.0.0.1")); javaParameters.getProgramParametersList().add("-socket" + myServerSocket.getLocalPort()); } catch (IOException e) { LOG.error(e); } } protected boolean spansMultipleModules(final String qualifiedName) { if (qualifiedName != null) { final Project project = getConfiguration().getProject(); final PsiPackage aPackage = JavaPsiFacade.getInstance(project).findPackage(qualifiedName); if (aPackage != null) { final TestSearchScope scope = getScope(); if (scope != null) { final SourceScope sourceScope = scope.getSourceScope(getConfiguration()); if (sourceScope != null) { final GlobalSearchScope configurationSearchScope = GlobalSearchScopesCore.projectTestScope(project).intersectWith( sourceScope.getGlobalSearchScope()); final PsiDirectory[] directories = aPackage.getDirectories(configurationSearchScope); return Arrays.stream(directories) .map(dir -> ModuleUtilCore.findModuleForFile(dir.getVirtualFile(), project)) .filter(Objects::nonNull) .distinct() .count() > 1; } } } } return false; } /** * Configuration based on a package spanning multiple modules. 
*/ protected boolean forkPerModule() { return getScope() != TestSearchScope.SINGLE_MODULE && toChangeWorkingDirectory(getConfiguration().getWorkingDirectory()) && spansMultipleModules(getConfiguration().getPackage()); } private static boolean toChangeWorkingDirectory(final String workingDirectory) { //noinspection deprecation return PathMacroUtil.DEPRECATED_MODULE_DIR.equals(workingDirectory) || PathMacroUtil.MODULE_WORKING_DIR.equals(workingDirectory) || ProgramParametersConfigurator.MODULE_WORKING_DIR.equals(workingDirectory); } protected void createTempFiles(JavaParameters javaParameters) { try { myWorkingDirsFile = FileUtil.createTempFile("idea_working_dirs_" + getFrameworkId(), ".tmp", true); javaParameters.getProgramParametersList().add("@w@" + myWorkingDirsFile.getAbsolutePath()); myTempFile = FileUtil.createTempFile("idea_" + getFrameworkId(), ".tmp", true); passTempFile(javaParameters.getProgramParametersList(), myTempFile.getAbsolutePath()); } catch (Exception e) { LOG.error(e); } } protected void writeClassesPerModule(String packageName, JavaParameters javaParameters, Map<Module, List<String>> perModule, @NotNull String filters) throws FileNotFoundException, UnsupportedEncodingException { if (perModule != null) { final String classpath = getScope() == TestSearchScope.WHOLE_PROJECT ? null : javaParameters.getClassPath().getPathsString(); String workingDirectory = getConfiguration().getWorkingDirectory(); //when only classpath should be changed, e.g. for starting tests in IDEA's project when some modules can never appear on the same classpath, //like plugin and corresponding IDE register the same components twice boolean toChangeWorkingDirectory = toChangeWorkingDirectory(workingDirectory); try (PrintWriter wWriter = new PrintWriter(myWorkingDirsFile, CharsetToolkit.UTF8)) { wWriter.println(packageName); for (Module module : perModule.keySet()) { wWriter.println(toChangeWorkingDirectory ? 
PathMacroUtil.getModuleDir(module.getModuleFilePath()) : workingDirectory); wWriter.println(module.getName()); if (classpath == null) { final JavaParameters parameters = new JavaParameters(); try { JavaParametersUtil.configureModule(module, parameters, JavaParameters.JDK_AND_CLASSES_AND_TESTS, getConfiguration().isAlternativeJrePathEnabled() ? getConfiguration() .getAlternativeJrePath() : null); if (JavaSdkUtil.isJdkAtLeast(parameters.getJdk(), JavaSdkVersion.JDK_1_9)) { configureModulePath(parameters, module); } configureRTClasspath(parameters, module); parameters.getClassPath().add(JavaSdkUtil.getIdeaRtJarPath()); writeClasspath(wWriter, parameters); } catch (CantRunException e) { writeClasspath(wWriter, javaParameters); } } else { writeClasspath(wWriter, javaParameters); } final List<String> classNames = perModule.get(module); wWriter.println(classNames.size()); for (String className : classNames) { wWriter.println(className); } wWriter.println(filters); } } } } private static void writeClasspath(PrintWriter wWriter, JavaParameters parameters) { wWriter.println(parameters.getClassPath().getPathsString()); wWriter.println(parameters.getModulePath().getPathsString()); ParamsGroup paramsGroup = getJigsawOptions(parameters); if (paramsGroup == null) { wWriter.println(0); } else { List<String> parametersList = paramsGroup.getParametersList().getList(); wWriter.println(parametersList.size()); for (String option : parametersList) { wWriter.println(option); } } } protected void deleteTempFiles() { if (myTempFile != null) { FileUtil.delete(myTempFile); } if (myWorkingDirsFile != null) { FileUtil.delete(myWorkingDirsFile); } } public void appendRepeatMode() throws ExecutionException { } protected boolean useModulePath() { return true; } }
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.cluster; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.test.ESTestCase; import java.nio.file.Path; import java.util.Arrays; import java.util.List; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import 
static org.hamcrest.Matchers.equalTo; public class DiskUsageTests extends ESTestCase { public void testDiskUsageCalc() { DiskUsage du = new DiskUsage("node1", "n1", "random", 100, 40); assertThat(du.getFreeDiskAsPercentage(), equalTo(40.0)); assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - 40.0)); assertThat(du.getFreeBytes(), equalTo(40L)); assertThat(du.getUsedBytes(), equalTo(60L)); assertThat(du.getTotalBytes(), equalTo(100L)); // Test that DiskUsage handles invalid numbers, as reported by some // filesystems (ZFS & NTFS) DiskUsage du2 = new DiskUsage("node1", "n1","random", 100, 101); assertThat(du2.getFreeDiskAsPercentage(), equalTo(101.0)); assertThat(du2.getFreeBytes(), equalTo(101L)); assertThat(du2.getUsedBytes(), equalTo(-1L)); assertThat(du2.getTotalBytes(), equalTo(100L)); DiskUsage du3 = new DiskUsage("node1", "n1", "random",-1, -1); assertThat(du3.getFreeDiskAsPercentage(), equalTo(100.0)); assertThat(du3.getFreeBytes(), equalTo(-1L)); assertThat(du3.getUsedBytes(), equalTo(0L)); assertThat(du3.getTotalBytes(), equalTo(-1L)); DiskUsage du4 = new DiskUsage("node1", "n1","random", 0, 0); assertThat(du4.getFreeDiskAsPercentage(), equalTo(100.0)); assertThat(du4.getFreeBytes(), equalTo(0L)); assertThat(du4.getUsedBytes(), equalTo(0L)); assertThat(du4.getTotalBytes(), equalTo(0L)); } public void testRandomDiskUsage() { int iters = scaledRandomIntBetween(1000, 10000); for (int i = 1; i < iters; i++) { long total = between(Integer.MIN_VALUE, Integer.MAX_VALUE); long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE); DiskUsage du = new DiskUsage("random", "random", "random", total, free); if (total == 0) { assertThat(du.getFreeBytes(), equalTo(free)); assertThat(du.getTotalBytes(), equalTo(0L)); assertThat(du.getUsedBytes(), equalTo(-free)); assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0)); assertThat(du.getUsedDiskAsPercentage(), equalTo(0.0)); } else { assertThat(du.getFreeBytes(), equalTo(free)); assertThat(du.getTotalBytes(), 
equalTo(total)); assertThat(du.getUsedBytes(), equalTo(total - free)); assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0 * ((double) free / total))); assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - (100.0 * ((double) free / total)))); } } } public void testFillShardLevelInfo() { final Index index = new Index("test", "0xdeadbeef"); ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), false, PeerRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_0 = ShardRoutingHelper.initialize(test_0, "node1"); test_0 = ShardRoutingHelper.moveToStarted(test_0); Path test0Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("0"); CommonStats commonStats0 = new CommonStats(); commonStats0.store = new StoreStats(100); ShardRouting test_1 = ShardRouting.newUnassigned(new ShardId(index, 1), false, PeerRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_1 = ShardRoutingHelper.initialize(test_1, "node2"); test_1 = ShardRoutingHelper.moveToStarted(test_1); Path test1Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("1"); CommonStats commonStats1 = new CommonStats(); commonStats1.store = new StoreStats(1000); ShardStats[] stats = new ShardStats[] { new ShardStats(test_0, new ShardPath(false, test0Path, test0Path, test_0.shardId()), commonStats0 , null, null), new ShardStats(test_1, new ShardPath(false, test1Path, test1Path, test_1.shardId()), commonStats1 , null, null) }; ImmutableOpenMap.Builder<String, Long> shardSizes = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder<ShardRouting, String> routingToPath = ImmutableOpenMap.builder(); ClusterState state = ClusterState.builder(new ClusterName("blarg")).version(0).build(); InternalClusterInfoService.buildShardLevelInfo(logger, stats, shardSizes, routingToPath, state); assertEquals(2, shardSizes.size()); 
assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_0))); assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_1))); assertEquals(100L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_0)).longValue()); assertEquals(1000L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_1)).longValue()); assertEquals(2, routingToPath.size()); assertTrue(routingToPath.containsKey(test_0)); assertTrue(routingToPath.containsKey(test_1)); assertEquals(test0Path.getParent().getParent().getParent().toAbsolutePath().toString(), routingToPath.get(test_0)); assertEquals(test1Path.getParent().getParent().getParent().toAbsolutePath().toString(), routingToPath.get(test_1)); } public void testFillDiskUsage() { ImmutableOpenMap.Builder<String, DiskUsage> newLeastAvaiableUsages = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder<String, DiskUsage> newMostAvaiableUsages = ImmutableOpenMap.builder(); FsInfo.Path[] node1FSInfo = new FsInfo.Path[] { new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80), new FsInfo.Path("/least", "/dev/sdb", 200, 190, 70), new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280), }; FsInfo.Path[] node2FSInfo = new FsInfo.Path[] { new FsInfo.Path("/least_most", "/dev/sda", 100, 90, 80), }; FsInfo.Path[] node3FSInfo = new FsInfo.Path[] { new FsInfo.Path("/least", "/dev/sda", 100, 90, 70), new FsInfo.Path("/most", "/dev/sda", 100, 90, 80), }; List<NodeStats> nodeStats = Arrays.asList( new NodeStats(new DiscoveryNode("node_1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null, null), new NodeStats(new DiscoveryNode("node_2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null, null), new NodeStats(new DiscoveryNode("node_3", buildNewFakeTransportAddress(), emptyMap(), 
emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); DiskUsage leastNode_1 = newLeastAvaiableUsages.get("node_1"); DiskUsage mostNode_1 = newMostAvaiableUsages.get("node_1"); assertDiskUsage(mostNode_1, node1FSInfo[2]); assertDiskUsage(leastNode_1, node1FSInfo[1]); DiskUsage leastNode_2 = newLeastAvaiableUsages.get("node_2"); DiskUsage mostNode_2 = newMostAvaiableUsages.get("node_2"); assertDiskUsage(leastNode_2, node2FSInfo[0]); assertDiskUsage(mostNode_2, node2FSInfo[0]); DiskUsage leastNode_3 = newLeastAvaiableUsages.get("node_3"); DiskUsage mostNode_3 = newMostAvaiableUsages.get("node_3"); assertDiskUsage(leastNode_3, node3FSInfo[0]); assertDiskUsage(mostNode_3, node3FSInfo[1]); } public void testFillDiskUsageSomeInvalidValues() { ImmutableOpenMap.Builder<String, DiskUsage> newLeastAvailableUsages = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder<String, DiskUsage> newMostAvailableUsages = ImmutableOpenMap.builder(); FsInfo.Path[] node1FSInfo = new FsInfo.Path[] { new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80), new FsInfo.Path("/least", "/dev/sdb", -1, -1, -1), new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280), }; FsInfo.Path[] node2FSInfo = new FsInfo.Path[] { new FsInfo.Path("/least_most", "/dev/sda", -2, -1, -1), }; FsInfo.Path[] node3FSInfo = new FsInfo.Path[] { new FsInfo.Path("/most", "/dev/sda", 100, 90, 70), new FsInfo.Path("/least", "/dev/sda", 10, -8, 0), }; List<NodeStats> nodeStats = Arrays.asList( new NodeStats(new DiscoveryNode("node_1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null, null), new NodeStats(new DiscoveryNode("node_2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0, 
null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null, null), new NodeStats(new DiscoveryNode("node_3", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1"); DiskUsage mostNode_1 = newMostAvailableUsages.get("node_1"); assertNull("node1 should have been skipped", leastNode_1); assertDiskUsage(mostNode_1, node1FSInfo[2]); DiskUsage leastNode_2 = newLeastAvailableUsages.get("node_2"); DiskUsage mostNode_2 = newMostAvailableUsages.get("node_2"); assertNull("node2 should have been skipped", leastNode_2); assertNull("node2 should have been skipped", mostNode_2); DiskUsage leastNode_3 = newLeastAvailableUsages.get("node_3"); DiskUsage mostNode_3 = newMostAvailableUsages.get("node_3"); assertDiskUsage(leastNode_3, node3FSInfo[1]); assertDiskUsage(mostNode_3, node3FSInfo[0]); } private void assertDiskUsage(DiskUsage usage, FsInfo.Path path) { assertNotNull(usage); assertNotNull(path); assertEquals(usage.toString(), usage.getPath(), path.getPath()); assertEquals(usage.toString(), usage.getTotalBytes(), path.getTotal().getBytes()); assertEquals(usage.toString(), usage.getFreeBytes(), path.getAvailable().getBytes()); } }
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <gunter.zeilinger@tiani.com> * Franz Willer <franz.willer@gwi-ag.com> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ package org.dcm4chee.xdsb.source.mbean; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.activation.DataHandler; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLSession; import javax.xml.bind.JAXBElement; import javax.xml.bind.Unmarshaller; import javax.xml.transform.Result; import javax.xml.transform.Source; import javax.xml.transform.Templates; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerConfigurationException; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.TransformerFactoryConfigurationError; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.sax.SAXTransformerFactory; import javax.xml.transform.sax.TransformerHandler; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import org.apache.log4j.Logger; import org.dcm4che2.audit.message.AuditEvent; import org.dcm4che2.audit.message.AuditMessage; import org.dcm4che2.util.UIDUtils; import org.dcm4chee.xds.common.UUID; import org.dcm4chee.xds.common.XDSConstants; import org.dcm4chee.xds.common.audit.HttpUserInfo; import org.dcm4chee.xds.common.audit.XDSExportMessage; import org.dcm4chee.xds.common.exception.XDSException; import org.dcm4chee.xds.common.infoset.ExtrinsicObjectType; import org.dcm4chee.xds.common.infoset.ObjectFactory; import org.dcm4chee.xds.common.infoset.ProvideAndRegisterDocumentSetRequestType; import org.dcm4chee.xds.common.infoset.RegistryPackageType; 
import org.dcm4chee.xds.common.infoset.RegistryResponseType; import org.dcm4chee.xds.common.infoset.SubmitObjectsRequest; import org.dcm4chee.xds.common.infoset.ProvideAndRegisterDocumentSetRequestType.Document; import org.dcm4chee.xds.common.utils.InfoSetUtil; import org.dcm4chee.xds.common.ws.DocumentRepositoryPortType; import org.dcm4chee.xds.common.ws.DocumentRepositoryPortTypeFactory; import org.jboss.system.ServiceMBeanSupport; import org.jboss.system.server.ServerConfigLocator; import org.w3c.dom.Node; /** * @author franz.willer@gmail.com * @version $Revision: 5476 $ $Date: 2007-11-21 09:45:36 +0100 (Mi, 21 Nov 2007) $ * @since Mar 08, 2006 */ public class XDSbSourceService extends ServiceMBeanSupport { private static final String V2TOV3_XSL = "v2tov3.xsl"; private static final String V3TOV2_XSL = "v3tov2.xsl"; private static final String CERT = "CERT"; private static final String NONE = "NONE"; private String xdsRepositoryURI; private String proxyHost; private int proxyPort; private String keystoreURL = "resource:identity.p12"; private String keystorePassword; private String trustStoreURL = "resource:cacerts.jks"; private String trustStorePassword; private HostnameVerifier origHostnameVerifier = null; private String allowedUrlHost = null; private String sourceID; private boolean logRequest; private boolean logResponse; private boolean indentXmlLog; private boolean useSOAP11=false; private ObjectFactory objFac = new ObjectFactory(); private String v2Tov3Xslt; private String v3Tov2Xslt; private Templates v2toV3tpl; private Templates v3toV2tpl; public String getSourceId() { return sourceID; } public void setSourceId(String id) { this.sourceID = id; } /** * @return Returns the docRepositoryURI. */ public String getXDSRepositoryURI() { return this.xdsRepositoryURI; } /** * @param docRepositoryURI The docRepositoryURI to set. */ public void setXDSRepositoryURI(String uri) { this.xdsRepositoryURI = uri; } /** * @return Returns the proxyHost. 
*/ public String getProxyHost() { return proxyHost == null ? NONE : proxyHost; } /** * @param proxyHost The proxyHost to set. */ public void setProxyHost(String proxyHost) { if ( NONE.equals(proxyHost) ) this.proxyHost = null; else this.proxyHost = proxyHost; } /** * @return Returns the proxyPort. */ public int getProxyPort() { return proxyPort; } /** * @param proxyPort The proxyPort to set. */ public void setProxyPort(int proxyPort) { this.proxyPort = proxyPort; } /** * @param keyStorePassword The keyStorePassword to set. */ public void setKeyStorePassword(String keyStorePassword) { if ( NONE.equals(keyStorePassword)) keyStorePassword = null; this.keystorePassword = keyStorePassword; } /** * @return Returns the keyStoreURL. */ public String getKeyStoreURL() { return keystoreURL; } /** * @param keyStoreURL The keyStoreURL to set. */ public void setKeyStoreURL(String keyStoreURL) { this.keystoreURL = keyStoreURL; } /** * @return Returns the trustStore. */ public String getTrustStoreURL() { return trustStoreURL == null ? NONE : trustStoreURL; } /** * @param trustStore The trustStore to set. */ public void setTrustStoreURL(String trustStoreURL) { if ( NONE.equals(trustStoreURL ) ) { this.trustStoreURL = null; } else { this.trustStoreURL = trustStoreURL; } } /** * @param trustStorePassword The trustStorePassword to set. */ public void setTrustStorePassword(String trustStorePassword) { if ( NONE.equals(trustStorePassword)) trustStorePassword = null; this.trustStorePassword = trustStorePassword; } /** * @return Returns the allowedUrlHost. */ public String getAllowedUrlHost() { return allowedUrlHost == null ? CERT : allowedUrlHost; } /** * @param allowedUrlHost The allowedUrlHost to set. */ public void setAllowedUrlHost(String allowedUrlHost) { this.allowedUrlHost = CERT.equals(allowedUrlHost) ? null : allowedUrlHost; } /** * @return Returns the logSOAPMessage. 
*/ public boolean isLogRequestMessage() { return logRequest; } /** * @param logSOAPMessage The logSOAPMessage to set. */ public void setLogRequestMessage(boolean b) { this.logRequest = b; } public boolean isLogResponseMessage() { return logResponse; } public void setLogResponseMessage(boolean b) { this.logResponse = b; } public boolean isIndentXmlLog() { return indentXmlLog; } public void setIndentXmlLog(boolean indentSOAPLog) { this.indentXmlLog = indentSOAPLog; } public String getV2Tov3Xslt() { return v2Tov3Xslt == null ? "DEFAULT" : v2Tov3Xslt; } public void setV2Tov3Xslt(String tov3Xslt) { v2Tov3Xslt = "DEFAULT".equals(tov3Xslt) ? null : tov3Xslt; v2toV3tpl = null; } public String getV3Tov2Xslt() { return v3Tov2Xslt == null ? "DEFAULT" : v3Tov2Xslt; } public void setV3Tov2Xslt(String tov2Xslt) { v3Tov2Xslt = "DEFAULT".equals(tov2Xslt) ? null : tov2Xslt; v3toV2tpl = null; } private Templates getV2toV3Template() throws TransformerConfigurationException, TransformerFactoryConfigurationError, IOException { if ( v2toV3tpl == null ) { URL url = this.getClass().getResource(V2TOV3_XSL); SAXTransformerFactory tf = (SAXTransformerFactory)TransformerFactory.newInstance(); if ( v2Tov3Xslt == null ) { v2toV3tpl = tf.newInstance().newTemplates( new StreamSource(this.getClass().getResourceAsStream(V2TOV3_XSL)) ); } else { v2toV3tpl = tf.newInstance().newTemplates( new StreamSource(new File(resolvePath(v2Tov3Xslt)))); } } return v2toV3tpl; } private Templates getV3toV2Template() throws TransformerConfigurationException, TransformerFactoryConfigurationError { if ( v3toV2tpl == null ) { SAXTransformerFactory tf = (SAXTransformerFactory)TransformerFactory.newInstance(); if ( v3Tov2Xslt == null ) { v3toV2tpl = tf.newInstance().newTemplates( new StreamSource(this.getClass().getResourceAsStream(V3TOV2_XSL))); } else { v3toV2tpl = tf.newInstance().newTemplates( new StreamSource(new File(resolvePath(v3Tov2Xslt)))); } } return v3toV2tpl; } public boolean isUseSOAP11() { return 
useSOAP11; } public void setUseSOAP11(boolean useSOAP11) { this.useSOAP11 = useSOAP11; } private boolean isRimV2(Node n) { if ( n instanceof org.w3c.dom.Document) n = n.getFirstChild(); return XDSConstants.NS_URN_REGISTRY_2_1.equals(n.getNamespaceURI()); } private Node convertRimVersion(Node nIn, boolean toV3) throws Exception { if ( log.isDebugEnabled()) { log.debug("convert rim version to "+(toV3 ? "V3" : "V2")); logNode("convert rim version input:", nIn); log.debug("Input is Document?:"+(nIn instanceof org.w3c.dom.Document)); } DOMResult result = new DOMResult(); convertV2V3(new DOMSource(nIn), result, toV3 ? getV2toV3Template() : getV3toV2Template() ); Node nOut = result.getNode(); if (log.isDebugEnabled() ) logNode("convert rim version output:", nOut); return nOut; } private void convertV2V3(Source xmlSource, Result target, Templates tpl) throws TransformerConfigurationException, TransformerException{ SAXTransformerFactory tf = (SAXTransformerFactory)TransformerFactory.newInstance(); TransformerHandler th = tf.newTransformerHandler(tpl); th.getTransformer().transform(xmlSource, target); } public Node exportDocuments(Node req, Map docs) throws XDSException { return exportDocuments(req, docs, false); } public Node exportDocuments(Node req, Map docs, boolean unifyUIDs) throws XDSException { try { log.info("------------exportDocuments:"+req); Unmarshaller unmarshaller = InfoSetUtil.getJAXBContext().createUnmarshaller(); boolean v2Req = isRimV2(req); SubmitObjectsRequest submitRequest = (SubmitObjectsRequest) unmarshaller.unmarshal( v2Req ? 
convertRimVersion(req, true) : req ); log.info("unmarshalled SubmitObjectsRequest:"+ submitRequest); if ( InfoSetUtil.getRegistryPackage(submitRequest) == null ) { submitRequest = unmarshallWorkaround(req, unmarshaller); } ProvideAndRegisterDocumentSetRequestType pnr = objFac.createProvideAndRegisterDocumentSetRequestType(); pnr.setSubmitObjectsRequest(submitRequest); List l = pnr.getDocument(); Map.Entry entry; Document doc; DataHandler dh; for ( Iterator iter = docs.entrySet().iterator() ; iter.hasNext() ;) { entry = (Map.Entry)iter.next(); dh = (DataHandler) entry.getValue(); doc = objFac.createProvideAndRegisterDocumentSetRequestTypeDocument(); doc.setId((String) entry.getKey()); doc.setValue(dh); l.add(doc); } if (unifyUIDs) unifyUIDs(pnr); RegistryResponseType regRsp = this.exportDocuments(pnr); DOMResult result = new DOMResult(); InfoSetUtil.getJAXBContext().createMarshaller().marshal( objFac.createRegistryResponse(regRsp), result); Node nRsp = result.getNode(); if ( v2Req ) { return convertRimVersion(nRsp, false); } else { return nRsp; } } catch (Throwable t) { throw new XDSException(XDSConstants.XDS_ERR_REPOSITORY_ERROR,"Provide And Register failed!",t); } } private SubmitObjectsRequest unmarshallWorkaround(Node req, Unmarshaller unmarshaller) throws Exception { SubmitObjectsRequest submitRequest; File errFile = new File(resolvePath("log/xds/error/xds_err_unmarshall.xml")); errFile.getParentFile().mkdirs(); FileOutputStream os = new FileOutputStream(errFile); writeNode(req, os); os.close(); log.info("Unmarshalling Error! Request saved as file ("+errFile+")!" 
); log.info("Try to unmarshall with workaround!"); DOMResult result = new DOMResult(); ByteArrayOutputStream baos = new ByteArrayOutputStream(); this.writeNode(req, baos); byte[] xmlData = baos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(xmlData); for ( int i = 0 ; i < xmlData.length && xmlData[i] != '<' ; bais.read(),i++ ); convertV2V3(new StreamSource(bais), result, getV2toV3Template() ); Node nOut = result.getNode(); submitRequest = (SubmitObjectsRequest) unmarshaller.unmarshal(nOut); log.debug("unmarshalled SubmitObjectsRequest WORKAROUND:"+ submitRequest); log.debug("InfoSetUtil.getRegistryPackage(submitRequest):"+InfoSetUtil.getRegistryPackage(submitRequest) ); log.info("Unmarshall with workaround success:"+InfoSetUtil.getRegistryPackage(submitRequest) != null ); return submitRequest; } public boolean exportTestFile(String filename) throws Exception { Unmarshaller unmarshaller = InfoSetUtil.getJAXBContext().createUnmarshaller(); JAXBElement o = (JAXBElement) unmarshaller.unmarshal(new File(filename)); log.debug("unmarshalled Object:"+ o); ProvideAndRegisterDocumentSetRequestType req = (ProvideAndRegisterDocumentSetRequestType) o.getValue(); log.debug("unmarshalled ProvideAndRegisterDocumentSet Request:"+ req); unifyUIDs(req); RegistryResponseType rsp = exportDocuments( req ); return this.checkResponse(rsp); } private void unifyUIDs(ProvideAndRegisterDocumentSetRequestType req) { SubmitObjectsRequest so = req.getSubmitObjectsRequest(); Map eoMap = InfoSetUtil.getExtrinsicObjects(so); ExtrinsicObjectType eo; //unify all Document UIDs for ( Iterator iter = eoMap.values().iterator() ; iter.hasNext() ;) { eo = (ExtrinsicObjectType) iter.next(); InfoSetUtil.setExternalIdentifierValue(UUID.XDSDocumentEntry_uniqueId, UIDUtils.createUID(), eo); } RegistryPackageType rp = InfoSetUtil.getRegistryPackage(so); InfoSetUtil.setExternalIdentifierValue(UUID.XDSSubmissionSet_uniqueId, UIDUtils.createUID(), rp); 
InfoSetUtil.setExternalIdentifierValue(UUID.XDSSubmissionSet_sourceId, this.getSourceId(), rp); } public RegistryResponseType exportDocuments(ProvideAndRegisterDocumentSetRequestType req) throws XDSException { try { log.debug("------------exportDocuments"); SubmitObjectsRequest submitRequest = req.getSubmitObjectsRequest(); if (logRequest) { log.info("ProvideAndRegisterDocumentSetRequest xml:"+InfoSetUtil.marshallObject( objFac.createProvideAndRegisterDocumentSetRequest(req), indentXmlLog)); } RegistryPackageType registryPackage = InfoSetUtil.getRegistryPackage(submitRequest); if ( registryPackage == null ) { log.error("No RegistryPackage found!"); throw new XDSException( XDSConstants.XDS_ERR_REPOSITORY_ERROR, XDSConstants.XDS_ERR_MISSING_REGISTRY_PACKAGE, null); } String submissionUID = InfoSetUtil.getExternalIdentifierValue(UUID.XDSSubmissionSet_uniqueId,registryPackage); String patId = InfoSetUtil.getExternalIdentifierValue(UUID.XDSSubmissionSet_patientId,registryPackage); String patName = "hidden"; log.info("SubmissionUID:"+submissionUID); log.info("patId:"+patId); log.info("patName:"+patName); configProxyAndTLS(xdsRepositoryURI); DocumentRepositoryPortType port = null; if (useSOAP11) { port = DocumentRepositoryPortTypeFactory.getDocumentRepositoryPortSoap11( xdsRepositoryURI, XDSConstants.URN_IHE_ITI_2007_PROVIDE_AND_REGISTER_DOCUMENT_SET_B, java.util.UUID.randomUUID().toString()); } else { port = DocumentRepositoryPortTypeFactory.getDocumentRepositoryPortSoap12( xdsRepositoryURI, XDSConstants.URN_IHE_ITI_2007_PROVIDE_AND_REGISTER_DOCUMENT_SET_B, java.util.UUID.randomUUID().toString()); } log.info("####################################################"); log.info("####################################################"); log.info("XDS.b: Send provide and register document-b request to repository:"+xdsRepositoryURI); log.info("####################################################"); log.info("####################################################"); 
RegistryResponseType rsp = port.documentRepositoryProvideAndRegisterDocumentSetB(req); if ( this.logResponse ) { log.info("Received RegistryResponse:"+InfoSetUtil.marshallObject( objFac.createRegistryResponse(rsp), indentXmlLog) ); } boolean success = checkResponse( rsp ); logExport(submissionUID, patId, patName, success); log.info("ProvideAndRegisterDocumentSetRequest success:"+success); return rsp; /*_*/ } catch (XDSException x) { throw x; } catch (Throwable t) { throw new XDSException(XDSConstants.XDS_ERR_REPOSITORY_ERROR,"Provide And Register failed!",t); } } private boolean checkResponse(RegistryResponseType rsp) throws Exception { if ( rsp == null ){ log.error("No RegistryResponse from registry!"); return false; } log.debug("Check RegistryResponse:"+InfoSetUtil.marshallObject(objFac.createRegistryResponse(rsp), indentXmlLog) ); String status = rsp.getStatus(); log.debug("Rsp status:"+status ); return status == null ? false : XDSConstants.XDS_B_STATUS_SUCCESS.equalsIgnoreCase(rsp.getStatus()); } /** * */ private void configProxyAndTLS(String url) { String protocol = url.startsWith("https") ? 
"https" : "http"; if ( proxyHost != null && proxyHost.trim().length() > 1 ) { System.setProperty( protocol+".proxyHost", proxyHost); System.setProperty(protocol+".proxyPort", String.valueOf(proxyPort)); } else { System.setProperty(protocol+".proxyHost", ""); System.setProperty(protocol+".proxyPort", ""); } if ( "https".equals(protocol) && trustStoreURL != null ) { String keyStorePath = resolvePath(keystoreURL); String trustStorePath = resolvePath(trustStoreURL); System.setProperty("javax.net.ssl.keyStore", keyStorePath); if ( keystorePassword != null ) System.setProperty("javax.net.ssl.keyStorePassword", keystorePassword); System.setProperty("javax.net.ssl.keyStoreType","PKCS12"); System.setProperty("javax.net.ssl.trustStore", trustStorePath); if ( trustStorePassword != null ) System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword); if ( origHostnameVerifier == null) { origHostnameVerifier = HttpsURLConnection.getDefaultHostnameVerifier(); HostnameVerifier hv = new HostnameVerifier() { public boolean verify(String urlHostName, SSLSession session) { if ( !origHostnameVerifier.verify ( urlHostName, session)) { if ( isAllowedUrlHost(urlHostName)) { log.warn("Warning: URL Host: "+urlHostName+" vs. 
"+session.getPeerHost()); } else { return false; } } return true; } private boolean isAllowedUrlHost(String urlHostName) { if (allowedUrlHost == null) return false; if ( allowedUrlHost.equals("*")) return true; return allowedUrlHost.equals(urlHostName); } }; HttpsURLConnection.setDefaultHostnameVerifier(hv); } } } public void testExport(String fnV2SubmReq, String fnDoc) throws Exception { File fSubmReq = new File(resolvePath(fnV2SubmReq)); if (!fSubmReq.isFile()) { throw new FileNotFoundException(fSubmReq.getAbsolutePath()); } File fDoc = new File(resolvePath(fnDoc)); SAXTransformerFactory tf = (SAXTransformerFactory)TransformerFactory.newInstance(); DOMResult result = new DOMResult(); tf.newTransformer().transform(new StreamSource(fSubmReq), result); Node req = result.getNode(); DataHandler dh = new DataHandler(fDoc.toURL()); Map docs = new HashMap(); docs.put("doc_1", dh); this.exportDocuments(req, docs, true); } public void convertV2toV3File(String fnSrc, String fnDst) throws Exception { File src = new File(resolvePath(fnSrc)); if (!src.isFile()) { throw new FileNotFoundException(src.getAbsolutePath()); } File dst = new File(resolvePath(fnDst)); dst.getParentFile().mkdirs(); convertV2V3(new StreamSource(src), new StreamResult(dst), getV2toV3Template()); } public static String resolvePath(String fn) { File f = new File(fn); if (f.isAbsolute()) return f.getAbsolutePath(); File serverHomeDir = ServerConfigLocator.locate().getServerHomeDir(); return new File(serverHomeDir, f.getPath()).getAbsolutePath(); } private void logExport(String submissionUID, String patId, String patName, boolean success) { HttpUserInfo userInfo = new HttpUserInfo(AuditMessage.isEnableDNSLookups()); String user = userInfo.getUserId(); XDSExportMessage msg = XDSExportMessage.createDocumentSourceExportMessage(submissionUID, patId, patName); msg.setOutcomeIndicator(success ? 
AuditEvent.OutcomeIndicator.SUCCESS: AuditEvent.OutcomeIndicator.MINOR_FAILURE); msg.setSource(AuditMessage.getProcessID(), AuditMessage.getLocalAETitles(), AuditMessage.getProcessName(), AuditMessage.getLocalHostName()); msg.setHumanRequestor(user != null ? user : "unknown", null, null); String host = "unknown"; try { host = new URL(xdsRepositoryURI).getHost(); } catch (MalformedURLException ignore) { } msg.setDestination(xdsRepositoryURI, null, "XDS Export", host ); msg.validate(); Logger.getLogger("auditlog").info(msg); } public void logNode(String msg, Node node) { try { ByteArrayOutputStream out = new ByteArrayOutputStream(); out.write(msg.getBytes()); writeNode( node, out ); log.info(out.toString()); } catch (Exception e) { log.warn("Failed to log Node "+node, e); } } public void writeNode(Node node, OutputStream out) { try { Source s = new DOMSource(node); out.write('\n'); Transformer t = TransformerFactory.newInstance().newTransformer(); if (indentXmlLog) t.setOutputProperty("indent", "yes"); t.transform(s, new StreamResult(out)); } catch (Exception e) { log.warn("Failed to log Node "+node, e); } /*_*/ } }
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.impl.internal.store.tiering; import org.ehcache.Cache; import org.ehcache.config.ResourcePool; import org.ehcache.config.ResourcePools; import org.ehcache.config.ResourceType; import org.ehcache.core.CacheConfigurationChangeListener; import org.ehcache.core.spi.store.StoreAccessException; import org.ehcache.core.spi.function.BiFunction; import org.ehcache.core.spi.function.Function; import org.ehcache.core.spi.function.NullaryFunction; import org.ehcache.impl.internal.store.disk.OffHeapDiskStore; import org.ehcache.impl.internal.store.heap.OnHeapStore; import org.ehcache.impl.internal.store.offheap.OffHeapStore; import org.ehcache.spi.service.ServiceProvider; import org.ehcache.core.spi.store.Store; import org.ehcache.core.spi.store.events.StoreEventSource; import org.ehcache.core.spi.store.tiering.AuthoritativeTier; import org.ehcache.core.spi.store.tiering.CachingTier; import org.ehcache.spi.service.Service; import org.ehcache.spi.service.ServiceConfiguration; import org.ehcache.core.internal.util.ConcurrentWeakIdentityHashMap; import org.ehcache.spi.service.ServiceDependencies; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terracotta.context.annotations.ContextAttribute; import org.terracotta.statistics.StatisticsManager; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import 
java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.unmodifiableSet; import static org.ehcache.config.ResourceType.Core.DISK; import static org.ehcache.config.ResourceType.Core.HEAP; import static org.ehcache.config.ResourceType.Core.OFFHEAP; /** * A {@link Store} implementation supporting a tiered caching model. */ public class TieredStore<K, V> implements Store<K, V> { private static final Logger LOG = LoggerFactory.getLogger(TieredStore.class); private final AtomicReference<CachingTier<K, V>> cachingTierRef; private final CachingTier<K, V> noopCachingTier; private final CachingTier<K, V> realCachingTier; private final AuthoritativeTier<K, V> authoritativeTier; private final TieringStoreStatsSettings tieringStoreStatsSettings; public TieredStore(CachingTier<K, V> cachingTier, AuthoritativeTier<K, V> authoritativeTier) { this.cachingTierRef = new AtomicReference<CachingTier<K, V>>(cachingTier); this.authoritativeTier = authoritativeTier; this.realCachingTier = cachingTier; this.noopCachingTier = new NoopCachingTier<K, V>(authoritativeTier); this.realCachingTier.setInvalidationListener(new CachingTier.InvalidationListener<K, V>() { @Override public void onInvalidation(K key, ValueHolder<V> valueHolder) { TieredStore.this.authoritativeTier.flush(key, valueHolder); } }); this.authoritativeTier.setInvalidationValve(new AuthoritativeTier.InvalidationValve() { @Override public void invalidateAll() throws StoreAccessException { invalidateAllInternal(); } }); StatisticsManager.associate(cachingTier).withParent(this); StatisticsManager.associate(authoritativeTier).withParent(this); tieringStoreStatsSettings = new TieringStoreStatsSettings(cachingTier, authoritativeTier); StatisticsManager.associate(tieringStoreStatsSettings).withParent(this); } @Override public ValueHolder<V> get(final 
K key) throws StoreAccessException { try { return cachingTier().getOrComputeIfAbsent(key, new Function<K, ValueHolder<V>>() { @Override public ValueHolder<V> apply(K key) { try { return authoritativeTier.getAndFault(key); } catch (StoreAccessException cae) { throw new ComputationException(cae); } } }); } catch (ComputationException ce) { throw ce.getStoreAccessException(); } } static class ComputationException extends RuntimeException { public ComputationException(StoreAccessException cause) { super(cause); } public StoreAccessException getStoreAccessException() { return (StoreAccessException) getCause(); } @Override public synchronized Throwable fillInStackTrace() { return this; } } @Override public boolean containsKey(K key) throws StoreAccessException { return authoritativeTier.containsKey(key); } @Override public PutStatus put(final K key, final V value) throws StoreAccessException { try { return authoritativeTier.put(key, value); } finally { cachingTier().invalidate(key); } } @Override public ValueHolder<V> putIfAbsent(K key, V value) throws StoreAccessException { ValueHolder<V> previous = null; try { previous = authoritativeTier.putIfAbsent(key, value); } finally { if (previous == null) { cachingTier().invalidate(key); } } return previous; } @Override public boolean remove(K key) throws StoreAccessException { try { return authoritativeTier.remove(key); } finally { cachingTier().invalidate(key); } } @Override public RemoveStatus remove(K key, V value) throws StoreAccessException { RemoveStatus removed = null; try { removed = authoritativeTier.remove(key, value); return removed; } finally { if (removed != null && removed.equals(RemoveStatus.REMOVED)) { cachingTier().invalidate(key); } } } @Override public ValueHolder<V> replace(K key, V value) throws StoreAccessException { ValueHolder<V> previous = null; boolean exceptionThrown = true; try { previous = authoritativeTier.replace(key, value); exceptionThrown = false; } finally { if (exceptionThrown || previous != 
null) { cachingTier().invalidate(key); } } return previous; } @Override public ReplaceStatus replace(K key, V oldValue, V newValue) throws StoreAccessException { ReplaceStatus replaced = null; try { replaced = authoritativeTier.replace(key, oldValue, newValue); } finally { if (replaced != null && replaced.equals(ReplaceStatus.HIT)) { cachingTier().invalidate(key); } } return replaced; } @Override public void clear() throws StoreAccessException { boolean interrupted = false; while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { synchronized (noopCachingTier) { if(cachingTierRef.get() == noopCachingTier) { try { noopCachingTier.wait(); } catch (InterruptedException e) { interrupted = true; } } } } if(interrupted) { Thread.currentThread().interrupt(); } try { authoritativeTier.clear(); } finally { try { realCachingTier.clear(); } finally { if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { throw new AssertionError("Something bad happened"); } synchronized (noopCachingTier) { noopCachingTier.notify(); } } } } private void invalidateAllInternal() throws StoreAccessException { boolean interrupted = false; while(!cachingTierRef.compareAndSet(realCachingTier, noopCachingTier)) { synchronized (noopCachingTier) { if(cachingTierRef.get() == noopCachingTier) { try { noopCachingTier.wait(); } catch (InterruptedException e) { interrupted = true; } } } } if(interrupted) { Thread.currentThread().interrupt(); } try { realCachingTier.invalidateAll(); } finally { if(!cachingTierRef.compareAndSet(noopCachingTier, realCachingTier)) { throw new AssertionError("Something bad happened"); } synchronized (noopCachingTier) { noopCachingTier.notify(); } } } @Override public StoreEventSource<K, V> getStoreEventSource() { return authoritativeTier.getStoreEventSource(); } @Override public Iterator<Cache.Entry<K, ValueHolder<V>>> iterator() { return authoritativeTier.iterator(); } @Override public ValueHolder<V> compute(final K key, final BiFunction<? 
super K, ? super V, ? extends V> mappingFunction) throws StoreAccessException { try { return authoritativeTier.compute(key, mappingFunction); } finally { cachingTier().invalidate(key); } } @Override public ValueHolder<V> compute(final K key, final BiFunction<? super K, ? super V, ? extends V> mappingFunction, final NullaryFunction<Boolean> replaceEqual) throws StoreAccessException { try { return authoritativeTier.compute(key, mappingFunction, replaceEqual); } finally { cachingTier().invalidate(key); } } public ValueHolder<V> computeIfAbsent(final K key, final Function<? super K, ? extends V> mappingFunction) throws StoreAccessException { try { return cachingTier().getOrComputeIfAbsent(key, new Function<K, ValueHolder<V>>() { @Override public ValueHolder<V> apply(K k) { try { return authoritativeTier.computeIfAbsentAndFault(k, mappingFunction); } catch (StoreAccessException cae) { throw new ComputationException(cae); } } }); } catch (ComputationException ce) { throw ce.getStoreAccessException(); } } @Override public Map<K, ValueHolder<V>> bulkCompute(Set<? extends K> keys, Function<Iterable<? extends Map.Entry<? extends K, ? extends V>>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> remappingFunction) throws StoreAccessException { try { return authoritativeTier.bulkCompute(keys, remappingFunction); } finally { for (K key : keys) { cachingTier().invalidate(key); } } } @Override public Map<K, ValueHolder<V>> bulkCompute(Set<? extends K> keys, Function<Iterable<? extends Map.Entry<? extends K, ? extends V>>, Iterable<? extends Map.Entry<? extends K, ? extends V>>> remappingFunction, NullaryFunction<Boolean> replaceEqual) throws StoreAccessException { try { return authoritativeTier.bulkCompute(keys, remappingFunction, replaceEqual); } finally { for (K key : keys) { cachingTier().invalidate(key); } } } @Override public Map<K, ValueHolder<V>> bulkComputeIfAbsent(Set<? extends K> keys, Function<Iterable<? extends K>, Iterable<? extends Map.Entry<? extends K, ? 
extends V>>> mappingFunction) throws StoreAccessException { try { return authoritativeTier.bulkComputeIfAbsent(keys, mappingFunction); } finally { for (K key : keys) { cachingTier().invalidate(key); } } } @Override public List<CacheConfigurationChangeListener> getConfigurationChangeListeners() { List<CacheConfigurationChangeListener> configurationChangeListenerList = new ArrayList<CacheConfigurationChangeListener>(); configurationChangeListenerList.addAll(realCachingTier.getConfigurationChangeListeners()); configurationChangeListenerList.addAll(authoritativeTier.getConfigurationChangeListeners()); return configurationChangeListenerList; } private CachingTier<K, V> cachingTier() { return cachingTierRef.get(); } @ServiceDependencies({CompoundCachingTier.Provider.class, OnHeapStore.Provider.class, OffHeapStore.Provider.class, OffHeapDiskStore.Provider.class}) public static class Provider implements Store.Provider { private static final Set<Set<ResourceType.Core>> SUPPORTED_RESOURCE_COMBINATIONS; static { // Logic in setTierConfigurations must mirror this set final Set<Set<ResourceType.Core>> supported = new HashSet<Set<ResourceType.Core>>(); supported.add(unmodifiableSet(EnumSet.of(HEAP, DISK))); supported.add(unmodifiableSet(EnumSet.of(HEAP, OFFHEAP))); supported.add(unmodifiableSet(EnumSet.of(HEAP, OFFHEAP, DISK))); SUPPORTED_RESOURCE_COMBINATIONS = unmodifiableSet(supported); } private volatile ServiceProvider<Service> serviceProvider; private final ConcurrentMap<Store<?, ?>, Map.Entry<CachingTier.Provider, AuthoritativeTier.Provider>> providersMap = new ConcurrentWeakIdentityHashMap<Store<?, ?>, Map.Entry<CachingTier.Provider, AuthoritativeTier.Provider>>(); @Override public int rank(final Set<ResourceType<?>> resourceTypes, final Collection<ServiceConfiguration<?>> serviceConfigs) { if (SUPPORTED_RESOURCE_COMBINATIONS.contains(resourceTypes)) { return resourceTypes.size(); } else { return 0; } } @Override public <K, V> Store<K, V> createStore(Configuration<K, V> 
storeConfig, ServiceConfiguration<?>... serviceConfigs) { final ArrayList<ServiceConfiguration<?>> enhancedServiceConfigs = new ArrayList<ServiceConfiguration<?>>(Arrays.asList(serviceConfigs)); TieredStoreConfiguration tieredStoreServiceConfig = setTierConfigurations(storeConfig, enhancedServiceConfigs); Class<? extends CachingTier.Provider> cachingTierProviderClass = tieredStoreServiceConfig.cachingTierProvider(); CachingTier.Provider cachingTierProvider = serviceProvider.getService(cachingTierProviderClass); if (cachingTierProvider == null) { throw new IllegalArgumentException("No registered service for caching tier provider " + cachingTierProviderClass.getName()); } Class<? extends AuthoritativeTier.Provider> authoritativeTierProviderClass = tieredStoreServiceConfig.authoritativeTierProvider(); AuthoritativeTier.Provider authoritativeTierProvider = serviceProvider.getService(authoritativeTierProviderClass); if (authoritativeTierProvider == null) { throw new IllegalArgumentException("No registered service for authoritative tier provider " + authoritativeTierProviderClass.getName()); } final ServiceConfiguration<?>[] configurations = enhancedServiceConfigs.toArray(new ServiceConfiguration<?>[enhancedServiceConfigs.size()]); CachingTier<K, V> cachingTier = cachingTierProvider.createCachingTier(storeConfig, configurations); AuthoritativeTier<K, V> authoritativeTier = authoritativeTierProvider.createAuthoritativeTier(storeConfig, configurations); TieredStore<K, V> store = new TieredStore<K, V>(cachingTier, authoritativeTier); registerStore(store, cachingTierProvider, authoritativeTierProvider); return store; } /** * Creates a {@link TieredStoreConfiguration} and any component configurations fitting * the resources provided. 
* * @param storeConfig the basic {@code Store} configuration * @param enhancedServiceConfigs a modifiable list containing the collection of user-supplied * service configurations; this list is modified to include component * configurations created by this method * @param <K> the cache key type * @param <V> the cache value type * * @return the new {@code TieredStoreConfiguration} * * @throws IllegalArgumentException if the resource type set is not supported */ private <K, V> TieredStoreConfiguration setTierConfigurations( final Configuration<K, V> storeConfig, final List<ServiceConfiguration<?>> enhancedServiceConfigs) { final ResourcePools resourcePools = storeConfig.getResourcePools(); if (rank(resourcePools.getResourceTypeSet(), enhancedServiceConfigs) == 0) { throw new IllegalArgumentException("TieredStore.Provider does not support configured resource types " + resourcePools.getResourceTypeSet()); } ResourcePool heapPool = resourcePools.getPoolForResource(HEAP); ResourcePool offHeapPool = resourcePools.getPoolForResource(OFFHEAP); ResourcePool diskPool = resourcePools.getPoolForResource(DISK); // Values in SUPPORTED_RESOURCE_COMBINATIONS must mirror this logic final TieredStoreConfiguration tieredStoreConfiguration; if (diskPool != null) { if (heapPool == null) { throw new IllegalStateException("Cannot store to disk without heap resource"); } if (offHeapPool != null) { enhancedServiceConfigs.add(new CompoundCachingTierServiceConfiguration().higherProvider(OnHeapStore.Provider.class) .lowerProvider(OffHeapStore.Provider.class)); tieredStoreConfiguration = new TieredStoreConfiguration() .cachingTierProvider(CompoundCachingTier.Provider.class) .authoritativeTierProvider(OffHeapDiskStore.Provider.class); } else { tieredStoreConfiguration = new TieredStoreConfiguration() .cachingTierProvider(OnHeapStore.Provider.class) .authoritativeTierProvider(OffHeapDiskStore.Provider.class); } } else if (offHeapPool != null) { if (heapPool == null) { throw new 
IllegalStateException("Cannot store to offheap without heap resource"); } tieredStoreConfiguration = new TieredStoreConfiguration() .cachingTierProvider(OnHeapStore.Provider.class) .authoritativeTierProvider(OffHeapStore.Provider.class); } else { throw new IllegalStateException("TieredStore.Provider does not support heap-only stores"); } return tieredStoreConfiguration; } <K, V> void registerStore(final TieredStore<K, V> store, final CachingTier.Provider cachingTierProvider, final AuthoritativeTier.Provider authoritativeTierProvider) { if(providersMap.putIfAbsent(store, new AbstractMap.SimpleEntry<CachingTier.Provider, AuthoritativeTier.Provider>(cachingTierProvider, authoritativeTierProvider)) != null) { throw new IllegalStateException("Instance of the Store already registered!"); } } @Override public void releaseStore(Store<?, ?> resource) { Map.Entry<CachingTier.Provider, AuthoritativeTier.Provider> entry = providersMap.get(resource); if (entry == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } TieredStore tieredStore = (TieredStore) resource; entry.getKey().releaseCachingTier(tieredStore.realCachingTier); entry.getValue().releaseAuthoritativeTier(tieredStore.authoritativeTier); } @Override public void initStore(Store<?, ?> resource) { Map.Entry<CachingTier.Provider, AuthoritativeTier.Provider> entry = providersMap.get(resource); if (entry == null) { throw new IllegalArgumentException("Given store is not managed by this provider : " + resource); } TieredStore tieredStore = (TieredStore) resource; entry.getKey().initCachingTier(tieredStore.realCachingTier); entry.getValue().initAuthoritativeTier(tieredStore.authoritativeTier); } @Override public void start(ServiceProvider<Service> serviceProvider) { this.serviceProvider = serviceProvider; } @Override public void stop() { this.serviceProvider = null; providersMap.clear(); } private static class TieredStoreConfiguration { private Class<? 
extends CachingTier.Provider> cachingTierProvider; private Class<? extends AuthoritativeTier.Provider> authoritativeTierProvider; public TieredStoreConfiguration cachingTierProvider(Class<? extends CachingTier.Provider> cachingTierProvider) { this.cachingTierProvider = cachingTierProvider; return this; } public TieredStoreConfiguration authoritativeTierProvider(Class<? extends AuthoritativeTier.Provider> authoritativeTierProvider) { this.authoritativeTierProvider = authoritativeTierProvider; return this; } public Class<? extends CachingTier.Provider> cachingTierProvider() { return cachingTierProvider; } public Class<? extends AuthoritativeTier.Provider> authoritativeTierProvider() { return authoritativeTierProvider; } } } private static final class TieringStoreStatsSettings { @ContextAttribute("tags") private final Set<String> tags = new HashSet<String>(Arrays.asList("store")); @ContextAttribute("cachingTier") private final CachingTier<?, ?> cachingTier; @ContextAttribute("authoritativeTier") private final AuthoritativeTier<?, ?> authoritativeTier; TieringStoreStatsSettings(CachingTier<?, ?> cachingTier, AuthoritativeTier<?, ?> authoritativeTier) { this.cachingTier = cachingTier; this.authoritativeTier = authoritativeTier; } } private static class NoopCachingTier<K, V> implements CachingTier<K, V> { private final AuthoritativeTier<K, V> authoritativeTier; public NoopCachingTier(final AuthoritativeTier<K, V> authoritativeTier) { this.authoritativeTier = authoritativeTier; } @Override public ValueHolder<V> getOrComputeIfAbsent(final K key, final Function<K, ValueHolder<V>> source) throws StoreAccessException { final ValueHolder<V> apply = source.apply(key); authoritativeTier.flush(key, apply); return apply; } @Override public void invalidate(final K key) throws StoreAccessException { // noop } @Override public void invalidateAll() { // noop } @Override public void clear() throws StoreAccessException { // noop } @Override public void setInvalidationListener(final 
InvalidationListener<K, V> invalidationListener) { // noop } @Override public List<CacheConfigurationChangeListener> getConfigurationChangeListeners() { return null; } } }
/**
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *
 * Copyright 2012-2017 the original author or authors.
 */
package org.assertj.core.presentation;

import static java.lang.reflect.Array.getLength;
import static org.assertj.core.util.Arrays.isArray;
import static org.assertj.core.util.Arrays.isArrayTypePrimitive;
import static org.assertj.core.util.Arrays.isObjectArray;
import static org.assertj.core.util.Preconditions.checkArgument;
import static org.assertj.core.util.Strings.concat;
import static org.assertj.core.util.Strings.quote;

import java.io.File;
import java.lang.reflect.Array;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Collection;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import java.util.concurrent.atomic.AtomicMarkableReference;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.concurrent.atomic.AtomicStampedReference;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.Function;

import org.assertj.core.data.MapEntry;
import org.assertj.core.groups.Tuple;
import org.assertj.core.util.Arrays;
import org.assertj.core.util.Compatibility;
import org.assertj.core.util.DateUtil;

/**
 * Standard java object representation.
 *
 * @author Mariusz Smykula
 */
public class StandardRepresentation implements Representation {

  // can share this as StandardRepresentation has no state
  public static final StandardRepresentation STANDARD_REPRESENTATION = new StandardRepresentation();

  private static final String NULL = "null";

  private static final String TUPPLE_START = "(";
  private static final String TUPPLE_END = ")";

  private static final String DEFAULT_START = "[";
  private static final String DEFAULT_END = "]";

  private static final String DEFAULT_MAX_ELEMENTS_EXCEEDED = "...";

  // 4 spaces indentation : 2 space indentation after new line + '<' + '['
  // NOTE(review): the literals below may have been whitespace-collapsed in this copy of the
  // file — upstream uses four spaces for INDENTATION_AFTER_NEWLINE; verify against origin.
  static final String INDENTATION_AFTER_NEWLINE = " ";
  // used when formatting iterables to a single line
  static final String INDENTATION_FOR_SINGLE_LINE = " ";

  public static final String ELEMENT_SEPARATOR = ",";
  public static final String ELEMENT_SEPARATOR_WITH_NEWLINE = ELEMENT_SEPARATOR
                                                              + Compatibility.System.lineSeparator();

  // mutable global knobs; shared by every StandardRepresentation instance
  private static int maxLengthForSingleLineDescription = 80;

  // per-type custom formatters; keyed on the exact runtime class (no subtype lookup — see
  // hasCustomFormatterFor which uses containsKey on object.getClass())
  private static final Map<Class<?>, Function<?, String>> customFormatterByType = new HashMap<>();

  private static int maxElementsForPrinting = 1000;

  /**
   * It resets the static defaults for the standard representation.
   * <p>
   * The following defaults will be reapplied:
   * <ul>
   *   <li>{@code maxLengthForSingleLineDescription = 80}</li>
   *   <li>{@code maxElementsForPrinting = 1000}</li>
   * </ul>
   */
  public static void resetDefaults() {
    maxLengthForSingleLineDescription = 80;
    maxElementsForPrinting = 1000;
  }

  /**
   * Sets the maximum length an iterable/array description can have before being formatted on
   * multiple lines.
   *
   * @param value the new maximum; must be strictly positive.
   * @throws IllegalArgumentException if {@code value <= 0}.
   */
  public static void setMaxLengthForSingleLineDescription(int value) {
    checkArgument(value > 0, "maxLengthForSingleLineDescription must be > 0 but was %s", value);
    maxLengthForSingleLineDescription = value;
  }

  /** Returns the current single-line description length limit. */
  public static int getMaxLengthForSingleLineDescription() {
    return maxLengthForSingleLineDescription;
  }

  /**
   * Sets the maximum number of elements printed for an iterable, array or map; extra elements are
   * replaced by {@code "..."}.
   *
   * @param value the new maximum; must be {@code >= 1}.
   * @throws IllegalArgumentException if {@code value < 1}.
   */
  public static void setMaxElementsForPrinting(int value) {
    checkArgument(value >= 1, "maxElementsForPrinting must be >= 1, but was %s", value);
    maxElementsForPrinting = value;
  }

  /**
   * Registers new formatter for the given type. All instances of the given type will be formatted
   * with the provided formatter.
   * <p>
   * Note: lookup is by exact runtime class, not by assignability (see
   * {@link #hasCustomFormatterFor(Object)}).
   */
  public static <T> void registerFormatterForType(Class<T> type, Function<T, String> formatter) {
    customFormatterByType.put(type, formatter);
  }

  /**
   * Clear all formatters registered per type with {@link #registerFormatterForType(Class, Function)}.
   */
  public static void removeAllRegisteredFormatters() {
    customFormatterByType.clear();
  }

  /**
   * Returns the standard {@code toString} representation of the given object. It may or may not use
   * the object's own implementation of {@code toString}: known types (dates, atomics, collections,
   * arrays, maps, ...) are dispatched to dedicated formatters, custom registered formatters take
   * precedence, and anything else falls back to {@code object.toString()}.
   *
   * @param object the given object.
   * @return the {@code toString} representation of the given object, or {@code null} if
   *         {@code object} is {@code null}.
   */
  @Override
  public String toStringOf(Object object) {
    if (object == null) return null;
    // custom formatters win over every built-in representation
    if (hasCustomFormatterFor(object)) return customFormat(object);
    if (object instanceof Calendar) return toStringOf((Calendar) object);
    if (object instanceof Class<?>) return toStringOf((Class<?>) object);
    if (object instanceof Date) return toStringOf((Date) object);
    if (object instanceof AtomicBoolean) return toStringOf((AtomicBoolean) object);
    if (object instanceof AtomicInteger) return toStringOf((AtomicInteger) object);
    if (object instanceof AtomicLong) return toStringOf((AtomicLong) object);
    if (object instanceof AtomicReference) return toStringOf((AtomicReference<?>) object);
    if (object instanceof AtomicMarkableReference) return toStringOf((AtomicMarkableReference<?>) object);
    if (object instanceof AtomicStampedReference) return toStringOf((AtomicStampedReference<?>) object);
    // field updaters carry no printable state — show the simple class name only
    if (object instanceof AtomicIntegerFieldUpdater) return AtomicIntegerFieldUpdater.class.getSimpleName();
    if (object instanceof AtomicLongFieldUpdater) return AtomicLongFieldUpdater.class.getSimpleName();
    if (object instanceof AtomicReferenceFieldUpdater) return AtomicReferenceFieldUpdater.class.getSimpleName();
    if (object instanceof Number) return toStringOf((Number) object);
    if (object instanceof File) return toStringOf((File) object);
    if (object instanceof String) return toStringOf((String) object);
    if (object instanceof Character) return toStringOf((Character) object);
    if (object instanceof Comparator) return toStringOf((Comparator<?>) object);
    if (object instanceof SimpleDateFormat) return toStringOf((SimpleDateFormat) object);
    if (object instanceof PredicateDescription) return toStringOf((PredicateDescription) object);
    if (object instanceof CompletableFuture) return toStringOf((CompletableFuture<?>) object);
    if (isArray(object)) return formatArray(object);
    if (object instanceof Collection<?>) return smartFormat((Collection<?>) object);
    if (object instanceof Map<?, ?>) return toStringOf((Map<?, ?>) object);
    if (object instanceof Tuple) return toStringOf((Tuple) object);
    if (object instanceof MapEntry) return toStringOf((MapEntry<?, ?>) object);
    return object.toString();
  }

  /**
   * Formats {@code object} with the formatter registered for its exact runtime class.
   * Callers must have checked {@link #hasCustomFormatterFor(Object)} first.
   */
  @SuppressWarnings("unchecked")
  protected <T> String customFormat(T object) {
    if (object == null) return null;
    return ((Function<T, String>) customFormatterByType.get(object.getClass())).apply(object);
  }

  /** Returns whether a custom formatter was registered for the exact runtime class of {@code object}. */
  protected boolean hasCustomFormatterFor(Object object) {
    if (object == null) return false;
    return customFormatterByType.containsKey(object.getClass());
  }

  /** Formats a number; Float gets an {@code f} suffix, Long an {@code L} suffix, others use toString. */
  protected String toStringOf(Number number) {
    if (number instanceof Float) return toStringOf((Float) number);
    if (number instanceof Long) return toStringOf((Long) number);
    // fallback to default formatting
    return number.toString();
  }

  protected String toStringOf(AtomicBoolean atomicBoolean) {
    return String.format("AtomicBoolean(%s)", atomicBoolean.get());
  }

  protected String toStringOf(AtomicInteger atomicInteger) {
    return String.format("AtomicInteger(%s)", atomicInteger.get());
  }

  protected String toStringOf(AtomicLong atomicLong) {
    return String.format("AtomicLong(%s)", atomicLong.get());
  }

  /**
   * Formats a comparator: a redefined {@code toString} is quoted as-is; otherwise (default
   * {@code Object.toString} containing {@code @}) the simple class name is used, or
   * "anonymous comparator class" when there is no simple name.
   */
  protected String toStringOf(Comparator<?> comparator) {
    if (!comparator.toString().contains("@")) return quote(comparator.toString());
    String comparatorSimpleClassName = comparator.getClass().getSimpleName();
    if (comparatorSimpleClassName.length() == 0) return quote("anonymous comparator class");
    // if toString has not been redefined, let's use comparator simple class name.
    if (comparator.toString().contains(comparatorSimpleClassName + "@")) return quote(comparatorSimpleClassName);
    return quote(comparator.toString());
  }

  protected String toStringOf(Calendar c) {
    return DateUtil.formatAsDatetime(c);
  }

  protected String toStringOf(Class<?> c) {
    return c.getCanonicalName();
  }

  /** Surrounds the string with double quotes. */
  protected String toStringOf(String s) {
    return concat("\"", s, "\"");
  }

  /** Surrounds the character with single quotes. */
  protected String toStringOf(Character c) {
    return concat("'", c, "'");
  }

  protected String toStringOf(PredicateDescription p) {
    // don't enclose default description with ''
    return p.isDefault() ? String.format("%s", p.description) : String.format("'%s'", p.description);
  }

  protected String toStringOf(Date d) {
    return DateUtil.formatAsDatetimeWithMs(d);
  }

  protected String toStringOf(Float f) {
    return String.format("%sf", f);
  }

  protected String toStringOf(Long l) {
    return String.format("%sL", l);
  }

  protected String toStringOf(File f) {
    return f.getAbsolutePath();
  }

  protected String toStringOf(SimpleDateFormat dateFormat) {
    return dateFormat.toPattern();
  }

  /**
   * Formats a future as {@code ClassName[Incomplete|Completed: v|Failed: cause|Cancelled]}.
   * Only calls {@code join()} when the future is already done, so this never blocks.
   */
  protected String toStringOf(CompletableFuture<?> future) {
    String className = future.getClass().getSimpleName();
    if (!future.isDone()) return concat(className, "[Incomplete]");
    try {
      Object joinResult = future.join();
      // avoid stack overflow error if future join on itself or another future that cycles back to the first
      Object joinResultRepresentation = joinResult instanceof CompletableFuture ? joinResult
          : toStringOf(joinResult);
      return concat(className, "[Completed: ", joinResultRepresentation, "]");
    } catch (CompletionException e) {
      return concat(className, "[Failed: ", toStringOf(e.getCause()), "]");
    } catch (CancellationException e) {
      return concat(className, "[Cancelled]");
    }
  }

  /** Formats a tuple on one line between parentheses, e.g. {@code (a, b)}. */
  protected String toStringOf(Tuple tuple) {
    return singleLineFormat(tuple.toList(), TUPPLE_START, TUPPLE_END);
  }

  protected String toStringOf(MapEntry<?, ?> mapEntry) {
    return String.format("MapEntry[key=%s, value=%s]", toStringOf(mapEntry.key), toStringOf(mapEntry.value));
  }

  /**
   * Formats a map as {@code {k=v, ...}}, sorted by key when the keys are comparable, truncated at
   * {@code maxElementsForPrinting} entries, and guarding against a map that contains itself as a
   * key or value (printed as "(this Map)").
   */
  protected String toStringOf(Map<?, ?> map) {
    if (map == null) return null;
    Map<?, ?> sortedMap = toSortedMapIfPossible(map);
    Iterator<?> entriesIterator = sortedMap.entrySet().iterator();
    if (!entriesIterator.hasNext()) return "{}";
    StringBuilder builder = new StringBuilder("{");
    int printedElements = 0;
    for (;;) {
      Entry<?, ?> entry = (Entry<?, ?>) entriesIterator.next();
      if (printedElements == maxElementsForPrinting) {
        builder.append(DEFAULT_MAX_ELEMENTS_EXCEEDED);
        return builder.append("}").toString();
      }
      builder.append(format(map, entry.getKey())).append('=').append(format(map, entry.getValue()));
      printedElements++;
      if (!entriesIterator.hasNext()) return builder.append("}").toString();
      builder.append(", ");
    }
  }

  // TreeMap construction fails (CCE/NPE) when keys are not mutually comparable or are null;
  // fall back to the original map order in that case.
  private static Map<?, ?> toSortedMapIfPossible(Map<?, ?> map) {
    try {
      return new TreeMap<>(map);
    } catch (ClassCastException | NullPointerException e) {
      return map;
    }
  }

  // identity check prevents infinite recursion for self-referencing maps
  private String format(Map<?, ?> map, Object o) {
    return o == map ? "(this Map)" : toStringOf(o);
  }

  protected String toStringOf(AtomicReference<?> atomicReference) {
    return String.format("AtomicReference[%s]", toStringOf(atomicReference.get()));
  }

  protected String toStringOf(AtomicMarkableReference<?> atomicMarkableReference) {
    return String.format("AtomicMarkableReference[marked=%s, reference=%s]", atomicMarkableReference.isMarked(),
                         toStringOf(atomicMarkableReference.getReference()));
  }

  protected String toStringOf(AtomicStampedReference<?> atomicStampedReference) {
    return String.format("AtomicStampedReference[stamp=%s, reference=%s]", atomicStampedReference.getStamp(),
                         toStringOf(atomicStampedReference.getReference()));
  }

  @Override
  public String toString() {
    return this.getClass().getSimpleName();
  }

  /**
   * Returns the {@code String} representation of the given array, or {@code null} if the given
   * object is either {@code null} or not an array. This method supports arrays having other arrays
   * as elements.
   *
   * @param o the object that is expected to be an array.
   * @return the {@code String} representation of the given array.
   */
  protected String formatArray(Object o) {
    if (!isArray(o)) return null;
    return isObjectArray(o) ? smartFormat(this, (Object[]) o) : formatPrimitiveArray(o);
  }

  // NOTE(review): the `representation` parameter is unused by the three overloads below; kept for
  // signature compatibility with subclasses.
  protected String multiLineFormat(Representation representation, Object[] iterable, Set<Object[]> alreadyFormatted) {
    return format(iterable, ELEMENT_SEPARATOR_WITH_NEWLINE, INDENTATION_AFTER_NEWLINE, alreadyFormatted);
  }

  protected String singleLineFormat(Representation representation, Object[] iterable, String start, String end,
                                    Set<Object[]> alreadyFormatted) {
    return format(iterable, ELEMENT_SEPARATOR, INDENTATION_FOR_SINGLE_LINE, alreadyFormatted);
  }

  /**
   * Formats the array on a single line when the result fits within
   * {@code maxLengthForSingleLineDescription}, otherwise one element per line.
   */
  protected String smartFormat(Representation representation, Object[] iterable) {
    Set<Object[]> alreadyFormatted = new HashSet<>();
    String singleLineDescription = singleLineFormat(representation, iterable, DEFAULT_START, DEFAULT_END,
                                                    alreadyFormatted);
    return doesDescriptionFitOnSingleLine(singleLineDescription)
        ? singleLineDescription
        : multiLineFormat(representation, iterable, alreadyFormatted);
  }

  /**
   * Recursively formats an object array between {@code [} and {@code ]}, truncating at
   * {@code maxElementsForPrinting} elements. {@code alreadyFormatted} tracks arrays currently being
   * formatted so that a self-containing array prints "(this array)" instead of recursing forever.
   */
  protected String format(Object[] array, String elementSeparator, String indentation,
                          Set<Object[]> alreadyFormatted) {
    if (array == null) return null;
    if (array.length == 0) return DEFAULT_START + DEFAULT_END;
    // iterable has some elements
    StringBuilder desc = new StringBuilder();
    desc.append(DEFAULT_START);
    alreadyFormatted.add(array); // used to avoid infinite recursion when array contains itself
    int i = 0;
    while (true) {
      Object element = array[i];
      // do not indent first element
      if (i != 0) desc.append(indentation);
      if (i == maxElementsForPrinting) {
        desc.append(DEFAULT_MAX_ELEMENTS_EXCEEDED);
        alreadyFormatted.remove(array);
        return desc.append(DEFAULT_END).toString();
      }
      // add element representation
      if (!isArray(element)) desc.append(element == null ? NULL : toStringOf(element));
      else if (isArrayTypePrimitive(element)) desc.append(formatPrimitiveArray(element));
      else if (alreadyFormatted.contains(element)) desc.append("(this array)");
      else desc.append(format((Object[]) element, elementSeparator, indentation, alreadyFormatted));
      // manage end description
      if (i == array.length - 1) {
        alreadyFormatted.remove(array);
        return desc.append(DEFAULT_END).toString();
      }
      // there are still elements to describe
      desc.append(elementSeparator);
      i++;
    }
  }

  /**
   * Formats an array whose component type is primitive, truncating at
   * {@code maxElementsForPrinting} elements.
   *
   * @throws IllegalArgumentException (via {@code Arrays.notAnArrayOfPrimitives}) if {@code o} is an
   *         array but not of a primitive component type.
   */
  protected String formatPrimitiveArray(Object o) {
    if (!isArray(o)) return null;
    if (!isArrayTypePrimitive(o)) throw Arrays.notAnArrayOfPrimitives(o);
    int size = getLength(o);
    if (size == 0) return DEFAULT_START + DEFAULT_END;
    StringBuilder buffer = new StringBuilder();
    buffer.append(DEFAULT_START);
    buffer.append(toStringOf(Array.get(o, 0)));
    for (int i = 1; i < size; i++) {
      buffer.append(ELEMENT_SEPARATOR)
            .append(INDENTATION_FOR_SINGLE_LINE);
      if (i == maxElementsForPrinting) {
        buffer.append(DEFAULT_MAX_ELEMENTS_EXCEEDED);
        break;
      }
      buffer.append(toStringOf(Array.get(o, i)));
    }
    buffer.append(DEFAULT_END);
    return buffer.toString();
  }

  /**
   * Formats an iterable between {@code start} and {@code end}, truncating at
   * {@code maxElementsForPrinting} elements; a collection containing itself prints
   * "(this Collection)" for the self-reference.
   */
  public String format(Iterable<?> iterable, String start, String end, String elementSeparator,
                       String indentation) {
    if (iterable == null) return null;
    Iterator<?> iterator = iterable.iterator();
    if (!iterator.hasNext()) return start + end;
    // iterable has some elements
    StringBuilder desc = new StringBuilder(start);
    boolean firstElement = true;
    int printedElements = 0;
    while (true) {
      Object element = iterator.next();
      // do not indent first element
      if (firstElement) firstElement = false;
      else desc.append(indentation);
      // add element representation
      if (printedElements == maxElementsForPrinting) {
        desc.append(DEFAULT_MAX_ELEMENTS_EXCEEDED);
        return desc.append(end).toString();
      }
      desc.append(element == iterable ? "(this Collection)" : toStringOf(element));
      printedElements++;
      // manage end description
      if (!iterator.hasNext()) return desc.append(end).toString();
      // there are still elements to be describe
      desc.append(elementSeparator);
    }
  }

  protected String multiLineFormat(Iterable<?> iterable) {
    return format(iterable, DEFAULT_START, DEFAULT_END, ELEMENT_SEPARATOR_WITH_NEWLINE,
                  INDENTATION_AFTER_NEWLINE);
  }

  protected String singleLineFormat(Iterable<?> iterable, String start, String end) {
    return format(iterable, start, end, ELEMENT_SEPARATOR, INDENTATION_FOR_SINGLE_LINE);
  }

  /**
   * Returns the {@code String} representation of the given {@code Iterable}, or {@code null} if the
   * given {@code Iterable} is {@code null}.
   * <p>
   * The {@code Iterable} will be formatted to a single line if it does not exceed
   * {@code maxLengthForSingleLineDescription}, otherwise each element will be formatted on a new
   * line with indentation.
   *
   * @param iterable the {@code Iterable} to format.
   * @return the {@code String} representation of the given {@code Iterable}.
   */
  protected String smartFormat(Iterable<?> iterable) {
    String singleLineDescription = singleLineFormat(iterable, DEFAULT_START, DEFAULT_END);
    return doesDescriptionFitOnSingleLine(singleLineDescription)
        ? singleLineDescription
        : multiLineFormat(iterable);
  }

  private static boolean doesDescriptionFitOnSingleLine(String singleLineDescription) {
    return singleLineDescription == null
           || singleLineDescription.length() < maxLengthForSingleLineDescription;
  }
}
package mil.nga.giat.geowave.mapreduce.splits;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;

import org.apache.log4j.Logger;

import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.store.adapter.statistics.RowRangeHistogramStatistics;
import mil.nga.giat.geowave.core.store.adapter.statistics.histogram.ByteUtils;
import mil.nga.giat.geowave.core.store.index.PrimaryIndex;

/**
 * A mutable, mergeable/splittable collection of row ranges (grouped by index) that is iteratively
 * balanced by cardinality before being converted into a final {@code GeoWaveInputSplit}.
 * <p>
 * NOTE(review): instances are mutable ({@link #merge} and {@link #split} modify {@code splitInfo}),
 * yet {@code hashCode}/{@code equals}/{@code compareTo} depend on that mutable state — do not use
 * instances as hash keys while they are still being merged/split.
 */
public class IntermediateSplitInfo implements
		Comparable<IntermediateSplitInfo>
{
	private final static Logger LOGGER = Logger.getLogger(IntermediateSplitInfo.class);

	/**
	 * A single range/location pair tied to the index it belongs to; knows how to split itself at a
	 * target cardinality using row-range histogram statistics.
	 */
	protected class IndexRangeLocation
	{
		// mutable: split() replaces this with the lower half of the range
		private RangeLocationPair rangeLocationPair;
		private final PrimaryIndex index;

		public IndexRangeLocation(
				final RangeLocationPair rangeLocationPair,
				final PrimaryIndex index ) {
			this.rangeLocationPair = rangeLocationPair;
			this.index = index;
		}

		/**
		 * Splits this range so that the lower part contributes roughly
		 * {@code targetCardinality - currentCardinality} rows.
		 * <p>
		 * Side effect: on success this instance keeps the upper half (from the split key to the
		 * original end) and the returned object holds the lower half.
		 *
		 * @param stats histogram statistics for this range's index; {@code null} aborts the split.
		 * @param currentCardinality cardinality accumulated so far by the caller.
		 * @param targetCardinality the cardinality the caller is trying to reach.
		 * @return the new lower-half pair, or {@code null} when no usable split point exists (in
		 *         which case this instance may have been clamped to the lower half — see below).
		 */
		public IndexRangeLocation split(
				final RowRangeHistogramStatistics<?> stats,
				final double currentCardinality,
				final double targetCardinality ) {
			if (stats == null) {
				return null;
			}

			// fraction of this range's cardinality needed to reach the target, then the
			// histogram quantile corresponding to that fraction becomes the candidate split key
			final double thisCardinalty = rangeLocationPair.getCardinality();
			final double fraction = (targetCardinality - currentCardinality) / thisCardinalty;
			final byte[] start = rangeLocationPair.getRange().getStartKey();
			final byte[] end = rangeLocationPair.getRange().getEndKey();
			final double cdfStart = stats.cdf(start);
			final double cdfEnd = stats.cdf(end);
			final double expectedEndValue = stats.quantile(cdfStart + ((cdfEnd - cdfStart) * fraction));

			final int maxCardinality = Math.max(
					start.length,
					end.length);

			byte[] bytes = ByteUtils.toBytes(expectedEndValue);
			byte[] splitKey;

			// pad the quantile-derived key out to the width of the wider boundary key
			if ((bytes.length < 8) && (bytes.length < maxCardinality)) {
				// prepend with 0
				bytes = expandBytes(
						bytes,
						Math.min(
								8,
								maxCardinality));
			}
			if (bytes.length < maxCardinality) {
				splitKey = new byte[maxCardinality];
				System.arraycopy(
						bytes,
						0,
						splitKey,
						0,
						bytes.length);
			}
			else {
				splitKey = bytes;
			}

			final String location = rangeLocationPair.getLocation();
			final boolean startKeyInclusive = true;
			final boolean endKeyInclusive = false;

			// statistics can yield a key outside (start, end); fall back to the range midpoint
			if (new ByteArrayId(
					start).compareTo(new ByteArrayId(
					splitKey)) >= 0 || new ByteArrayId(
					end).compareTo(new ByteArrayId(
					splitKey)) <= 0) {
				splitKey = SplitsProvider.getMidpoint(rangeLocationPair.getRange());
				if (splitKey == null) {
					return null;
				}

				// if you can split the range only by setting the split to the
				// end, but its not inclusive on the end, just clamp this to the
				// start and don't split producing a new pair
				if (Arrays.equals(
						end,
						splitKey) && !rangeLocationPair.getRange().isEndKeyInclusive()) {
					rangeLocationPair = splitsProvider.constructRangeLocationPair(
							splitsProvider.constructRange(
									rangeLocationPair.getRange().getStartKey(),
									rangeLocationPair.getRange().isStartKeyInclusive(),
									splitKey,
									endKeyInclusive),
							location,
							stats.cardinality(
									rangeLocationPair.getRange().getStartKey(),
									splitKey));
					return null;
				}
			}

			try {
				// lower half: [original start, splitKey)
				final RangeLocationPair newPair = splitsProvider.constructRangeLocationPair(
						splitsProvider.constructRange(
								rangeLocationPair.getRange().getStartKey(),
								rangeLocationPair.getRange().isStartKeyInclusive(),
								splitKey,
								endKeyInclusive),
						location,
						stats.cardinality(
								rangeLocationPair.getRange().getStartKey(),
								splitKey));

				// this instance keeps the upper half: [splitKey, original end]
				rangeLocationPair = splitsProvider.constructRangeLocationPair(
						splitsProvider.constructRange(
								splitKey,
								startKeyInclusive,
								rangeLocationPair.getRange().getEndKey(),
								rangeLocationPair.getRange().isEndKeyInclusive()),
						location,
						stats.cardinality(
								splitKey,
								rangeLocationPair.getRange().getEndKey()));

				return new IndexRangeLocation(
						newPair,
						index);
			}
			catch (final java.lang.IllegalArgumentException ex) {
				// range construction can reject degenerate bounds; treat as "no split"
				LOGGER.info(
						"Unable to split range",
						ex);
				return null;
			}
		}

		// Left-pads valueBytes with zero bytes up to numBytes; if numBytes is smaller,
		// truncates to the first numBytes instead.
		private byte[] expandBytes(
				final byte valueBytes[],
				final int numBytes ) {
			final byte[] bytes = new byte[numBytes];
			int expansion = 0;
			if (numBytes > valueBytes.length) {
				expansion = (numBytes - valueBytes.length);
				for (int i = 0; i < expansion; i++) {
					bytes[i] = 0;
				}
				for (int i = 0; i < valueBytes.length; i++) {
					bytes[expansion + i] = valueBytes[i];
				}
			}
			else {
				for (int i = 0; i < numBytes; i++) {
					bytes[i] = valueBytes[i];
				}
			}
			return bytes;
		}
	}

	// ranges grouped by the index they belong to
	private final Map<PrimaryIndex, List<RangeLocationPair>> splitInfo;
	private final SplitsProvider splitsProvider;

	public IntermediateSplitInfo(
			final Map<PrimaryIndex, List<RangeLocationPair>> splitInfo,
			final SplitsProvider splitsProvider ) {
		this.splitInfo = splitInfo;
		this.splitsProvider = splitsProvider;
	}

	/** Absorbs all of {@code split}'s range/location pairs into this split, grouped by index. */
	synchronized void merge(
			final IntermediateSplitInfo split ) {
		for (final Entry<PrimaryIndex, List<RangeLocationPair>> e : split.splitInfo.entrySet()) {
			List<RangeLocationPair> thisList = splitInfo.get(e.getKey());
			if (thisList == null) {
				thisList = new ArrayList<RangeLocationPair>();
				splitInfo.put(
						e.getKey(),
						thisList);
			}
			thisList.addAll(e.getValue());
		}
	}

	/**
	 * Side effect: Break up this split.
	 *
	 * Split the ranges into two
	 *
	 * @return the new split.
	 */
	synchronized IntermediateSplitInfo split(
			final Map<PrimaryIndex, RowRangeHistogramStatistics<?>> statsCache ) {
		// generically you'd want the split to be as limiting to total
		// locations as possible and then as limiting as possible to total
		// indices, but in this case split() is only called when all ranges
		// are in the same location and the same index

		// order ranges by ascending cardinality; the comparator deliberately never returns 0 so
		// equal-cardinality entries are not collapsed by the TreeSet
		final TreeSet<IndexRangeLocation> orderedSplits = new TreeSet<IndexRangeLocation>(
				new Comparator<IndexRangeLocation>() {

					@Override
					public int compare(
							final IndexRangeLocation o1,
							final IndexRangeLocation o2 ) {
						return (o1.rangeLocationPair.getCardinality() - o2.rangeLocationPair.getCardinality()) < 0 ? -1
								: 1;
					}
				});
		for (final Entry<PrimaryIndex, List<RangeLocationPair>> ranges : splitInfo.entrySet()) {
			for (final RangeLocationPair p : ranges.getValue()) {
				orderedSplits.add(new IndexRangeLocation(
						p,
						ranges.getKey()));
			}
		}

		// aim for half of the total cardinality in each resulting split
		final double targetCardinality = getTotalRangeAtCardinality() / 2;
		double currentCardinality = 0.0;
		final Map<PrimaryIndex, List<RangeLocationPair>> otherSplitInfo = new HashMap<PrimaryIndex, List<RangeLocationPair>>();

		splitInfo.clear();

		do {
			final IndexRangeLocation next = orderedSplits.pollFirst();
			double nextCardinality = currentCardinality + next.rangeLocationPair.getCardinality();
			if (nextCardinality > targetCardinality) {
				// this range overshoots the target — try splitting it at the target boundary
				final IndexRangeLocation newSplit = next.split(
						statsCache.get(next.index),
						currentCardinality,
						targetCardinality);
				double splitCardinality = next.rangeLocationPair.getCardinality();
				// Stats can have inaccuracies over narrow ranges
				// thus, a split based on statistics may not be found
				if (newSplit != null) {
					splitCardinality += newSplit.rangeLocationPair.getCardinality();
					addPairForIndex(
							otherSplitInfo,
							newSplit.rangeLocationPair,
							newSplit.index);
					addPairForIndex(
							splitInfo,
							next.rangeLocationPair,
							next.index);
				}
				else {
					// Still add to the other SPLIT if there is remaining
					// pairs
					// in this SPLIT
					addPairForIndex(
							(!orderedSplits.isEmpty()) ? otherSplitInfo : splitInfo,
							next.rangeLocationPair,
							next.index);
				}

				nextCardinality = currentCardinality + splitCardinality;
				if (nextCardinality > targetCardinality) {
					break;
				}
				currentCardinality = nextCardinality;
			}
			else {
				addPairForIndex(
						otherSplitInfo,
						next.rangeLocationPair,
						next.index);
				currentCardinality = nextCardinality;
			}
		}
		while (!orderedSplits.isEmpty());

		// What is left of the ranges
		// that haven't been placed in the other split info
		for (final IndexRangeLocation split : orderedSplits) {
			addPairForIndex(
					splitInfo,
					split.rangeLocationPair,
					split.index);
		}

		// All ranges consumed by the other split
		if (splitInfo.size() == 0) {
			// First try to move a index set of ranges back.
			if (otherSplitInfo.size() > 1) {
				final Iterator<Entry<PrimaryIndex, List<RangeLocationPair>>> it = otherSplitInfo.entrySet().iterator();
				final Entry<PrimaryIndex, List<RangeLocationPair>> entry = it.next();
				it.remove();
				splitInfo.put(
						entry.getKey(),
						entry.getValue());
			}
			else {
				// only one index set remains — keep it here and return no new split
				splitInfo.putAll(otherSplitInfo);
				otherSplitInfo.clear();
			}
		}

		return otherSplitInfo.size() == 0 ? null : new IntermediateSplitInfo(
				otherSplitInfo,
				splitsProvider);
	}

	// Appends `pair` to the list registered for `index`, creating the list on first use.
	private void addPairForIndex(
			final Map<PrimaryIndex, List<RangeLocationPair>> otherSplitInfo,
			final RangeLocationPair pair,
			final PrimaryIndex index ) {
		List<RangeLocationPair> list = otherSplitInfo.get(index);
		if (list == null) {
			list = new ArrayList<RangeLocationPair>();
			otherSplitInfo.put(
					index,
					list);
		}
		list.add(pair);
	}

	/** Converts this intermediate split into the final input split, collecting all locations. */
	public synchronized GeoWaveInputSplit toFinalSplit() {
		final Set<String> locations = new HashSet<String>();
		for (final Entry<PrimaryIndex, List<RangeLocationPair>> entry : splitInfo.entrySet()) {
			for (final RangeLocationPair pair : entry.getValue()) {
				locations.add(pair.getLocation());
			}
		}
		return splitsProvider.constructInputSplit(
				splitInfo,
				locations.toArray(new String[locations.size()]));
	}

	/**
	 * Orders splits by total cardinality, then by number of indices, then by number of pairs, then
	 * by total range length, finally falling back to {@code hashCode} to break remaining ties.
	 */
	@Override
	public int compareTo(
			final IntermediateSplitInfo o ) {
		final double thisTotal = getTotalRangeAtCardinality();
		final double otherTotal = o.getTotalRangeAtCardinality();
		int result = Double.compare(
				thisTotal,
				otherTotal);
		if (result == 0) {
			result = Integer.compare(
					splitInfo.size(),
					o.splitInfo.size());
			if (result == 0) {
				final List<RangeLocationPair> pairs = new ArrayList<>();
				final List<RangeLocationPair> otherPairs = new ArrayList<>();
				double rangeSum = 0;
				double otherSum = 0;
				for (final List<RangeLocationPair> p : splitInfo.values()) {
					pairs.addAll(p);
				}
				for (final List<RangeLocationPair> p : o.splitInfo.values()) {
					otherPairs.addAll(p);
				}
				result = Integer.compare(
						pairs.size(),
						otherPairs.size());
				if (result == 0) {
					for (final RangeLocationPair p : pairs) {
						rangeSum += SplitsProvider.getRangeLength(p.getRange());
					}
					for (final RangeLocationPair p : otherPairs) {
						otherSum += SplitsProvider.getRangeLength(p.getRange());
					}
					result = Double.compare(
							rangeSum,
							otherSum);
					if (result == 0) {
						result = Integer.compare(
								hashCode(),
								o.hashCode());
					}
				}
			}
		}
		return result;
	}

	@Override
	public int hashCode() {
		final int prime = 31;
		int result = 1;
		result = (prime * result) + ((splitInfo == null) ? 0 : splitInfo.hashCode());
		result = (prime * result) + ((splitsProvider == null) ? 0 : splitsProvider.hashCode());
		return result;
	}

	@Override
	public boolean equals(
			final Object obj ) {
		if (this == obj) {
			return true;
		}
		if (obj == null) {
			return false;
		}
		if (getClass() != obj.getClass()) {
			return false;
		}
		final IntermediateSplitInfo other = (IntermediateSplitInfo) obj;
		if (splitInfo == null) {
			if (other.splitInfo != null) {
				return false;
			}
		}
		else if (!splitInfo.equals(other.splitInfo)) {
			return false;
		}
		if (splitsProvider == null) {
			if (other.splitsProvider != null) {
				return false;
			}
		}
		else if (!splitsProvider.equals(other.splitsProvider)) {
			return false;
		}
		return true;
	}

	// Sum of the cardinalities of every range currently held by this split.
	private synchronized double getTotalRangeAtCardinality() {
		double sum = 0.0;
		for (final List<RangeLocationPair> pairList : splitInfo.values()) {
			for (final RangeLocationPair pair : pairList) {
				sum += pair.getCardinality();
			}
		}
		return sum;
	}
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.utils.crypt;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;

import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.jasypt.encryption.pbe.config.SimpleStringPBEConfig;
import org.jasypt.exceptions.EncryptionOperationNotPossibleException;
import org.jasypt.properties.EncryptableProperties;

import com.cloud.utils.PropertiesUtil;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.exception.CloudRuntimeException;

import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;

/*
 * EncryptionSecretKeyChanger updates Management Secret Key / DB Secret Key or both.
 * DB secret key is validated against the key in db.properties
 * db.properties is updated with values encrypted using new MS secret key
 * DB data migrated using new DB secret key
 */
public class EncryptionSecretKeyChanger {

    // Encryptor configured with the OLD DB key (decrypts existing values).
    private StandardPBEStringEncryptor oldEncryptor = new StandardPBEStringEncryptor();
    // Encryptor configured with the NEW DB key (re-encrypts migrated values).
    private StandardPBEStringEncryptor newEncryptor = new StandardPBEStringEncryptor();
    // Location of the management-server key file when encryption.type == "file".
    private static final String keyFile = "/etc/cloudstack/management/key";

    /**
     * CLI entry point. Flags: -m <old MS key> -d <old DB key> (both required),
     * -n <new MS key> and/or -e <new DB key> (at least one required). Validates
     * the old DB key against db.properties, rewrites db.properties under the new
     * MS key, then re-encrypts DB data under the new DB key, reverting
     * db.properties and the key file on failure.
     */
    public static void main(String[] args){
        List<String> argsList = Arrays.asList(args);
        Iterator<String> iter = argsList.iterator();
        String oldMSKey = null;
        String oldDBKey = null;
        String newMSKey = null;
        String newDBKey = null;

        //Parse command-line args
        // NOTE(review): a trailing flag with no value will throw
        // NoSuchElementException from iter.next() — no arity check here.
        while (iter.hasNext()) {
            String arg = iter.next();
            // Old MS Key
            if (arg.equals("-m")) {
                oldMSKey = iter.next();
            }
            // Old DB Key
            if (arg.equals("-d")) {
                oldDBKey = iter.next();
            }
            // New MS Key
            if (arg.equals("-n")) {
                newMSKey = iter.next();
            }
            // New DB Key
            if (arg.equals("-e")) {
                newDBKey = iter.next();
            }
        }

        if(oldMSKey == null || oldDBKey ==null){
            System.out.println("Existing MS secret key or DB secret key is not provided");
            usage();
            return;
        }

        if(newMSKey == null && newDBKey ==null){
            System.out.println("New MS secret key and DB secret are both not provided");
            usage();
            return;
        }

        final File dbPropsFile = PropertiesUtil.findConfigFile("db.properties");
        final Properties dbProps;
        EncryptionSecretKeyChanger keyChanger = new EncryptionSecretKeyChanger();
        StandardPBEStringEncryptor encryptor = new StandardPBEStringEncryptor();
        keyChanger.initEncryptor(encryptor, oldMSKey);
        // EncryptableProperties transparently decrypts ENC(...) values with the old MS key.
        dbProps = new EncryptableProperties(encryptor);
        PropertiesConfiguration backupDBProps = null;

        System.out.println("Parsing db.properties file");
        try {
            dbProps.load(new FileInputStream(dbPropsFile));
            // Plain (non-decrypting) snapshot kept so db.properties can be reverted on failure.
            backupDBProps = new PropertiesConfiguration(dbPropsFile);
        } catch (FileNotFoundException e) {
            System.out.println("db.properties file not found while reading DB secret key" +e.getMessage());
        } catch (IOException e) {
            System.out.println("Error while reading DB secret key from db.properties" +e.getMessage());
        } catch (ConfigurationException e) {
            e.printStackTrace();
        }
        // NOTE(review): on load failure execution continues with an empty dbProps and a
        // null backupDBProps; the later backupDBProps.save() would then NPE — the load
        // errors above should probably return instead of falling through. Confirm intent.

        String dbSecretKey = null;
        try {
            dbSecretKey = dbProps.getProperty("db.cloud.encrypt.secret");
        } catch (EncryptionOperationNotPossibleException e) {
            System.out.println("Failed to decrypt existing DB secret key from db.properties. "+e.getMessage());
            return;
        }

        if(!oldDBKey.equals(dbSecretKey)){
            System.out.println("Incorrect MS Secret Key or DB Secret Key");
            return;
        }

        System.out.println("Secret key provided matched the key in db.properties");
        final String encryptionType = dbProps.getProperty("db.cloud.encryption.type");

        if(newMSKey == null){
            System.out.println("No change in MS Key. Skipping migrating db.properties");
        } else {
            if(!keyChanger.migrateProperties(dbPropsFile, dbProps, newMSKey, newDBKey)){
                System.out.println("Failed to update db.properties");
                return;
            } else {
                //db.properties updated successfully
                if(encryptionType.equals("file")){
                    //update key file with new MS key
                    try {
                        FileWriter fwriter = new FileWriter(keyFile);
                        BufferedWriter bwriter = new BufferedWriter(fwriter);
                        bwriter.write(newMSKey);
                        bwriter.close();
                        // NOTE(review): writer is not closed if write() throws — should
                        // be closed in a finally block.
                    } catch (IOException e) {
                        System.out.println("Failed to write new secret to file. Please update the file manually");
                    }
                }
            }
        }

        boolean success = false;
        if(newDBKey == null || newDBKey.equals(oldDBKey)){
            System.out.println("No change in DB Secret Key. Skipping Data Migration");
            // NOTE(review): success stays false here, so the failure/revert branch below
            // runs even though nothing failed — presumably unintended; verify.
        } else {
            EncryptionSecretKeyChecker.initEncryptorForMigration(oldMSKey);
            try {
                success = keyChanger.migrateData(oldDBKey, newDBKey);
            } catch (Exception e) {
                System.out.println("Error during data migration");
                e.printStackTrace();
                success = false;
            }
        }

        if(success){
            System.out.println("Successfully updated secret key(s)");
        } else {
            System.out.println("Data Migration failed. Reverting db.properties");
            //revert db.properties
            try {
                backupDBProps.save();
            } catch (ConfigurationException e) {
                e.printStackTrace();
            }
            if(encryptionType.equals("file")){
                //revert secret key in file
                try {
                    FileWriter fwriter = new FileWriter(keyFile);
                    BufferedWriter bwriter = new BufferedWriter(fwriter);
                    bwriter.write(oldMSKey);
                    bwriter.close();
                } catch (IOException e) {
                    System.out.println("Failed to revert to old secret to file. Please update the file manually");
                }
            }
        }
    }

    /**
     * Rewrites db.properties: re-encrypts db.cloud.encrypt.secret (when a new DB
     * key is given), db.cloud.password and db.usage.password under the new MS
     * key, wrapping each in the ENC(...) marker jasypt expects.
     *
     * @return true on success, false if any read/encrypt/save step failed.
     */
    private boolean migrateProperties(File dbPropsFile, Properties dbProps, String newMSKey, String newDBKey){
        System.out.println("Migrating db.properties..");
        StandardPBEStringEncryptor msEncryptor = new StandardPBEStringEncryptor();; // NOTE(review): stray extra ';'
        initEncryptor(msEncryptor, newMSKey);
        try {
            PropertiesConfiguration newDBProps = new PropertiesConfiguration(dbPropsFile);
            if(newDBKey!=null && !newDBKey.isEmpty()){
                newDBProps.setProperty("db.cloud.encrypt.secret", "ENC("+msEncryptor.encrypt(newDBKey)+")");
            }
            String prop = dbProps.getProperty("db.cloud.password");
            if(prop!=null && !prop.isEmpty()){
                newDBProps.setProperty("db.cloud.password", "ENC("+msEncryptor.encrypt(prop)+")");
            }
            prop = dbProps.getProperty("db.usage.password");
            if(prop!=null && !prop.isEmpty()){
                newDBProps.setProperty("db.usage.password", "ENC("+msEncryptor.encrypt(prop)+")");
            }
            newDBProps.save(dbPropsFile.getAbsolutePath());
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
        System.out.println("Migrating db.properties Done.");
        return true;
    }

    /**
     * Re-encrypts every encrypted DB value (config values, host details, VNC
     * passwords, user secret keys) from the old DB key to the new one inside a
     * single transaction; commits only if all four steps succeed.
     */
    private boolean migrateData(String oldDBKey, String newDBKey){
        System.out.println("Begin Data migration");
        initEncryptor(oldEncryptor, oldDBKey);
        initEncryptor(newEncryptor, newDBKey);
        System.out.println("Initialised Encryptors");

        Transaction txn = Transaction.open("Migrate");
        txn.start();
        try {
            Connection conn;
            try {
                conn = txn.getConnection();
            } catch (SQLException e) {
                throw new CloudRuntimeException("Unable to migrate encrypted data in the database", e);
            }
            migrateConfigValues(conn);
            migrateHostDetails(conn);
            migrateVNCPassword(conn);
            migrateUserCredentials(conn);
            txn.commit();
        } finally {
            txn.close();
        }
        System.out.println("End Data migration");
        return true;
    }

    /**
     * Configures the given jasypt encryptor with PBEWithMD5AndDES and the
     * supplied password. NOTE(review): MD5+DES is weak by modern standards;
     * kept as-is because stored data must round-trip with this algorithm.
     */
    private void initEncryptor(StandardPBEStringEncryptor encryptor, String secretKey){
        encryptor.setAlgorithm("PBEWithMD5AndDES");
        SimpleStringPBEConfig stringConfig = new SimpleStringPBEConfig();
        stringConfig.setPassword(secretKey);
        encryptor.setConfig(stringConfig);
    }

    /**
     * Decrypts a value with the old DB key and re-encrypts it with the new one.
     * Null/empty values pass through unchanged.
     */
    private String migrateValue(String value){
        if(value ==null || value.isEmpty()){
            return value;
        }
        String decryptVal = oldEncryptor.decrypt(value);
        return newEncryptor.encrypt(decryptVal);
    }

    /**
     * Re-encrypts all 'Hidden'/'Secure' rows of the configuration table in place.
     * NOTE(review): pstmt is reassigned inside the loop, leaking the select
     * statement (and each update statement but the last); the exception in the
     * finally-close is silently swallowed — both worth fixing.
     */
    private void migrateConfigValues(Connection conn) {
        System.out.println("Begin migrate config values");
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = conn.prepareStatement("select name, value from configuration where category in ('Hidden', 'Secure')");
            rs = pstmt.executeQuery();
            while (rs.next()) {
                String name = rs.getString(1);
                String value = rs.getString(2);
                if(value == null || value.isEmpty()){
                    continue;
                }
                String encryptedValue = migrateValue(value);
                pstmt = conn.prepareStatement("update configuration set value=? where name=?");
                pstmt.setBytes(1, encryptedValue.getBytes("UTF-8"));
                pstmt.setString(2, name);
                pstmt.executeUpdate();
            }
        } catch (SQLException e) {
            throw new CloudRuntimeException("Unable to update configuration values ", e);
        } catch (UnsupportedEncodingException e) {
            throw new CloudRuntimeException("Unable to update configuration values ", e);
        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
                if (pstmt != null) {
                    pstmt.close();
                }
            } catch (SQLException e) {
            }
        }
        System.out.println("End migrate config values");
    }

    /**
     * Re-encrypts host_details rows named 'password'. Same statement-reuse /
     * swallowed-close caveats as migrateConfigValues.
     */
    private void migrateHostDetails(Connection conn) {
        System.out.println("Begin migrate host details");
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = conn.prepareStatement("select id, value from host_details where name = 'password'");
            rs = pstmt.executeQuery();
            while (rs.next()) {
                long id = rs.getLong(1);
                String value = rs.getString(2);
                if(value == null || value.isEmpty()){
                    continue;
                }
                String encryptedValue = migrateValue(value);
                pstmt = conn.prepareStatement("update host_details set value=? where id=?");
                pstmt.setBytes(1, encryptedValue.getBytes("UTF-8"));
                pstmt.setLong(2, id);
                pstmt.executeUpdate();
            }
        } catch (SQLException e) {
            throw new CloudRuntimeException("Unable update host_details values ", e);
        } catch (UnsupportedEncodingException e) {
            throw new CloudRuntimeException("Unable update host_details values ", e);
        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
                if (pstmt != null) {
                    pstmt.close();
                }
            } catch (SQLException e) {
            }
        }
        System.out.println("End migrate host details");
    }

    /**
     * Re-encrypts vm_instance.vnc_password for every VM. Same caveats as above.
     */
    private void migrateVNCPassword(Connection conn) {
        System.out.println("Begin migrate VNC password");
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = conn.prepareStatement("select id, vnc_password from vm_instance");
            rs = pstmt.executeQuery();
            while (rs.next()) {
                long id = rs.getLong(1);
                String value = rs.getString(2);
                if(value == null || value.isEmpty()){
                    continue;
                }
                String encryptedValue = migrateValue(value);
                pstmt = conn.prepareStatement("update vm_instance set vnc_password=? where id=?");
                pstmt.setBytes(1, encryptedValue.getBytes("UTF-8"));
                pstmt.setLong(2, id);
                pstmt.executeUpdate();
            }
        } catch (SQLException e) {
            throw new CloudRuntimeException("Unable update vm_instance vnc_password ", e);
        } catch (UnsupportedEncodingException e) {
            throw new CloudRuntimeException("Unable update vm_instance vnc_password ", e);
        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
                if (pstmt != null) {
                    pstmt.close();
                }
            } catch (SQLException e) {
            }
        }
        System.out.println("End migrate VNC password");
    }

    /**
     * Re-encrypts user.secret_key for every user. Same caveats as above.
     */
    private void migrateUserCredentials(Connection conn) {
        System.out.println("Begin migrate user credentials");
        PreparedStatement pstmt = null;
        ResultSet rs = null;
        try {
            pstmt = conn.prepareStatement("select id, secret_key from user");
            rs = pstmt.executeQuery();
            while (rs.next()) {
                long id = rs.getLong(1);
                String secretKey = rs.getString(2);
                if(secretKey == null || secretKey.isEmpty()){
                    continue;
                }
                String encryptedSecretKey = migrateValue(secretKey);
                pstmt = conn.prepareStatement("update user set secret_key=? where id=?");
                pstmt.setBytes(1, encryptedSecretKey.getBytes("UTF-8"));
                pstmt.setLong(2, id);
                pstmt.executeUpdate();
            }
        } catch (SQLException e) {
            throw new CloudRuntimeException("Unable update user secret key ", e);
        } catch (UnsupportedEncodingException e) {
            throw new CloudRuntimeException("Unable update user secret key ", e);
        } finally {
            try {
                if (rs != null) {
                    rs.close();
                }
                if (pstmt != null) {
                    pstmt.close();
                }
            } catch (SQLException e) {
            }
        }
        System.out.println("End migrate user credentials");
    }

    // Prints the command-line usage summary for this tool.
    private static void usage(){
        System.out.println("Usage: \tEncryptionSecretKeyChanger \n" + "\t\t-m <Mgmt Secret Key> \n" + "\t\t-d <DB Secret Key> \n" + "\t\t-n [New Mgmt Secret Key] \n" + "\t\t-e [New DB Secret Key]");
    }
}
/* * This file is part of the OpenSCADA project * Copyright (C) 2006-2009 TH4 SYSTEMS GmbH (http://th4-systems.com) * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. */ package org.openscada.opc.dcom.da; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Random; import org.jinterop.dcom.common.JIException; import org.jinterop.dcom.common.JISystem; import org.jinterop.dcom.core.IJIComObject; import org.jinterop.dcom.core.JIComServer; import org.jinterop.dcom.core.JIProgId; import org.jinterop.dcom.core.JISession; import org.jinterop.dcom.core.JIVariant; import org.openscada.opc.dcom.common.EventHandler; import org.openscada.opc.dcom.common.KeyedResult; import org.openscada.opc.dcom.common.KeyedResultSet; import org.openscada.opc.dcom.common.Result; import org.openscada.opc.dcom.common.ResultSet; import org.openscada.opc.dcom.common.impl.OPCCommon; import org.openscada.opc.dcom.da.impl.OPCBrowseServerAddressSpace; import org.openscada.opc.dcom.da.impl.OPCGroupStateMgt; import org.openscada.opc.dcom.da.impl.OPCItemIO; import org.openscada.opc.dcom.da.impl.OPCItemMgt; import org.openscada.opc.dcom.da.impl.OPCItemProperties; import org.openscada.opc.dcom.da.impl.OPCServer; import org.openscada.opc.dcom.da.impl.OPCSyncIO; 
/**
 * Interactive smoke-test client for an OPC-DA server over DCOM (j-interop).
 * Connects to the server described by a TestConfiguration, dumps status,
 * browses the address space, adds/reads/writes items in a test group, then
 * cleans up. Intended to be run manually with host/user/password/domain args.
 */
public class Test1 {
    // DCOM session shared across helpers; torn down in main()'s finally block.
    private static JISession _session = null;

    // Resolves an OPC error code to its server-provided text (LCID 1033 = en-US) and prints it.
    public static void showError ( final OPCCommon common, final int errorCode ) throws JIException {
        System.out.println ( String.format ( "Error (%X): '%s'", errorCode, common.getErrorString ( errorCode, 1033 ) ) );
    }

    // Convenience overload: fetches the server's OPCCommon and delegates.
    public static void showError ( final OPCServer server, final int errorCode ) throws JIException {
        showError ( server.getCommon (), errorCode );
    }

    // Prints every access path the server reports for the given item id.
    public static void showAccessPaths ( final OPCBrowseServerAddressSpace browser, final String id ) throws IllegalArgumentException, UnknownHostException, JIException {
        for ( final String i : browser.browseAccessPaths ( id ).asCollection () ) {
            System.out.println ( "AccessPath Entry: " + i );
        }
    }

    /**
     * Dumps the server's hierarchical namespace; bails out early if the server
     * reports a non-hierarchical (flat) organization. Positions the browser at
     * the root before recursing.
     */
    public static void browseTree ( final OPCBrowseServerAddressSpace browser ) throws IllegalArgumentException, UnknownHostException, JIException {
        System.out.println ( "Showing hierarchial address space" );
        System.out.println ( String.format ( "Organization: %s", browser.queryOrganization () ) );
        if ( !browser.queryOrganization ().equals ( OPCNAMESPACETYPE.OPC_NS_HIERARCHIAL ) ) {
            return;
        }
        browser.changePosition ( null, OPCBROWSEDIRECTION.OPC_BROWSE_TO );
        browseTree ( browser, 0 );
    }

    /**
     * Recursive worker: prints leaves at the current position, then descends
     * into each branch and returns (the browser is stateful, so every DOWN is
     * paired with an UP). 'level' only controls the tab indentation.
     */
    protected static void browseTree ( final OPCBrowseServerAddressSpace browser, final int level ) throws JIException, IllegalArgumentException, UnknownHostException {
        final StringBuilder indent = new StringBuilder ( level );
        for ( int i = 0; i < level; i++ ) {
            indent.append ( '\t' );
        }

        for ( final String item : browser.browse ( OPCBROWSETYPE.OPC_LEAF, "", 0, JIVariant.VT_EMPTY ).asCollection () ) {
            System.out.println ( indent + "Leaf: " + item );
            System.out.println ( indent + "\tName: " + browser.getItemID ( item ) );
        }

        for ( final String item : browser.browse ( OPCBROWSETYPE.OPC_BRANCH, "", 0, JIVariant.VT_EMPTY ).asCollection () ) {
            System.out.println ( indent + "Branch: " + item );
            browser.changePosition ( item, OPCBROWSEDIRECTION.OPC_BROWSE_DOWN );
            browseTree ( browser, level + 1 );
            browser.changePosition ( null, OPCBROWSEDIRECTION.OPC_BROWSE_UP );
        }
    }

    // Dumps the entire namespace as a flat item list from the root.
    public static void browseFlat ( final OPCBrowseServerAddressSpace browser ) throws JIException, IllegalArgumentException, UnknownHostException {
        System.out.println ( String.format ( "Organization: %s", browser.queryOrganization () ) );
        browser.changePosition ( null, OPCBROWSEDIRECTION.OPC_BROWSE_TO );
        System.out.println ( "Showing flat address space" );
        for ( final String id : browser.browse ( OPCBROWSETYPE.OPC_FLAT, "", 0, JIVariant.VT_EMPTY ).asCollection () ) {
            System.out.println ( "Item: " + id );
            //showAccessPaths ( browser, id );
        }
    }

    // Prints every field of the group's current state.
    public static void dumpGroupState ( final OPCGroupStateMgt group ) throws JIException {
        final OPCGroupState state = group.getState ();
        System.out.println ( "Name: " + state.getName () );
        System.out.println ( "Active: " + state.isActive () );
        System.out.println ( "Update Rate: " + state.getUpdateRate () );
        System.out.println ( "Time Bias: " + state.getTimeBias () );
        System.out.println ( "Percent Deadband: " + state.getPercentDeadband () );
        System.out.println ( "Locale ID: " + state.getLocaleID () );
        System.out.println ( "Client Handle: " + state.getClientHandle () );
        System.out.println ( "Server Handle: " + state.getServerHandle () );
    }

    // Fetches the given property ids for an item and prints value + per-entry error code.
    public static void dumpItemProperties2 ( final OPCItemProperties itemProperties, final String itemID, final int... ids ) throws JIException {
        final KeyedResultSet<Integer, JIVariant> values = itemProperties.getItemProperties ( itemID, ids );
        for ( final KeyedResult<Integer, JIVariant> entry : values ) {
            System.out.println ( String.format ( "ID: %d, Value: %s, Error Code: %08x", entry.getKey (), entry.getValue ().toString (), entry.getErrorCode () ) );
        }
    }

    // Resolves property ids to their item IDs and prints the mapping.
    public static void dumpItemPropertiesLookup ( final OPCItemProperties itemProperties, final String itemID, final int... ids ) throws JIException {
        final KeyedResultSet<Integer, String> values = itemProperties.lookupItemIDs ( itemID, ids );
        for ( final KeyedResult<Integer, String> entry : values ) {
            System.out.println ( String.format ( "ID: %d, Item ID: %s, Error Code: %08x", entry.getKey (), entry.getValue (), entry.getErrorCode () ) );
        }
    }

    /**
     * Queries all available properties of an item, prints each description, then
     * exercises both the lookup and the value-query paths with the full id set.
     */
    public static void dumpItemProperties ( final OPCItemProperties itemProperties, final String itemID ) throws JIException {
        final Collection<PropertyDescription> properties = itemProperties.queryAvailableProperties ( itemID );
        final int[] ids = new int[properties.size ()];
        System.out.println ( String.format ( "Item Properties for '%s' (count:%d)", itemID, properties.size () ) );
        int i = 0;
        for ( final PropertyDescription pd : properties ) {
            ids[i] = pd.getId ();
            System.out.println ( "ID: " + pd.getId () );
            System.out.println ( "Description: " + pd.getDescription () );
            System.out.println ( "Variable Type: " + pd.getVarType () );
            i++;
        }
        System.out.println ( "Lookup" );
        dumpItemPropertiesLookup ( itemProperties, itemID, ids );
        System.out.println ( "Query" );
        dumpItemProperties2 ( itemProperties, itemID, ids );
    }

    // Issues a direct (group-less) read for the named items; results are discarded.
    public static void queryItems ( final OPCItemIO itemIO, final String... items ) throws JIException {
        final List<IORequest> requests = new LinkedList<IORequest> ();
        for ( final String item : items ) {
            requests.add ( new IORequest ( item, 0 ) );
        }
        itemIO.read ( requests.toArray ( new IORequest[0] ) );
    }

    /**
     * Prints every entry of an add/validate result set and returns true only if
     * no entry failed (used as a go/no-go gate by the callers).
     */
    public static boolean dumpOPCITEMRESULT ( final KeyedResultSet<OPCITEMDEF, OPCITEMRESULT> result ) {
        int failed = 0;
        for ( final KeyedResult<OPCITEMDEF, OPCITEMRESULT> resultEntry : result ) {
            System.out.println ( "==================================" );
            System.out.println ( String.format ( "Item: '%s' ", resultEntry.getKey ().getItemID () ) );
            System.out.println ( String.format ( "Error Code: %08x", resultEntry.getErrorCode () ) );
            if ( !resultEntry.isFailed () ) {
                System.out.println ( String.format ( "Server Handle: %08X", resultEntry.getValue ().getServerHandle () ) );
                System.out.println ( String.format ( "Data Type: %d", resultEntry.getValue ().getCanonicalDataType () ) );
                System.out.println ( String.format ( "Access Rights: %d", resultEntry.getValue ().getAccessRights () ) );
                System.out.println ( String.format ( "Reserved: %d", resultEntry.getValue ().getReserved () ) );
            }
            else {
                failed++;
            }
        }
        return failed == 0;
    }

    /**
     * Adds the write-test items to the group, performs a synchronous write of
     * each test value, reports per-item errors, then removes the items again.
     * Throws JIException if any item failed to be added.
     */
    public static void writeItems ( final OPCServer server, final OPCGroupStateMgt group, final WriteTest... writeTests ) throws IllegalArgumentException, UnknownHostException, JIException {
        System.out.println ( "Write items" );
        final OPCItemMgt itemManagement = group.getItemManagement ();
        final OPCITEMDEF[] defs = new OPCITEMDEF[writeTests.length];
        for ( int i = 0; i < writeTests.length; i++ ) {
            final OPCITEMDEF def = new OPCITEMDEF ();
            def.setActive ( true );
            def.setItemID ( writeTests[i].getItemID () );
            //def.setRequestedDataType ( (short)writeTests[i].getValue ().getType () );
            defs[i] = def;
            System.out.println ( String.format ( "%s <= (%d) %s", writeTests[i].getItemID (), writeTests[i].getValue ().getType (), writeTests[i].getValue ().toString () ) );
        }
        System.out.println ( "Add to group" );
        final KeyedResultSet<OPCITEMDEF, OPCITEMRESULT> result = itemManagement.add ( defs );
        final WriteRequest[] writeRequests = new WriteRequest[writeTests.length];
        final Integer[] serverHandles = new Integer[writeTests.length];
        for ( int i = 0; i < writeTests.length; i++ ) {
            if ( result.get ( i ).getErrorCode () != 0 ) {
                // NOTE(review): throwing here leaves items added in earlier
                // iterations registered in the group (no cleanup on this path).
                throw new JIException ( result.get ( i ).getErrorCode () );
            }
            writeRequests[i] = new WriteRequest ( result.get ( i ).getValue ().getServerHandle (), writeTests[i].getValue () );
            serverHandles[i] = writeRequests[i].getServerHandle ();
            System.out.println ( String.format ( "Item: %s, VT: %d", writeTests[i].getItemID (), result.get ( i ).getValue ().getCanonicalDataType () ) );
        }
        System.out.println ( "Perform write" );
        final OPCSyncIO syncIO = group.getSyncIO ();
        final ResultSet<WriteRequest> writeResults = syncIO.write ( writeRequests );
        for ( int i = 0; i < writeTests.length; i++ ) {
            final Result<WriteRequest> writeResult = writeResults.get ( i );
            System.out.println ( String.format ( "ItemID: %s, ErrorCode: %08X", writeTests[i].getItemID (), writeResult.getErrorCode () ) );
            if ( writeResult.getErrorCode () != 0 ) {
                showError ( server, writeResult.getErrorCode () );
            }
        }
        // finally remove them again
        System.out.println ( "Remove from group" );
        itemManagement.remove ( serverHandles );
        System.out.println ( "Write items...complete" );
    }

    /**
     * Full item lifecycle exercise: validate, add, activate, set client handles,
     * attach a data callback for ~10s, synchronously read device values, then
     * deactivate and remove. Aborts early if validate or add reports a failure.
     */
    public static void testItems ( final OPCServer server, final OPCGroupStateMgt group, final String... itemIDs ) throws IllegalArgumentException, UnknownHostException, JIException {
        final OPCItemMgt itemManagement = group.getItemManagement ();
        final List<OPCITEMDEF> items = new ArrayList<OPCITEMDEF> ( itemIDs.length );
        for ( final String id : itemIDs ) {
            final OPCITEMDEF item = new OPCITEMDEF ();
            item.setItemID ( id );
            item.setClientHandle ( new Random ().nextInt () );
            items.add ( item );
        }
        final OPCITEMDEF[] itemArray = items.toArray ( new OPCITEMDEF[0] );

        System.out.println ( "Validate" );
        KeyedResultSet<OPCITEMDEF, OPCITEMRESULT> result = itemManagement.validate ( itemArray );
        if ( !dumpOPCITEMRESULT ( result ) ) {
            return;
        }

        // now add them to the group
        System.out.println ( "Add" );
        result = itemManagement.add ( itemArray );
        if ( !dumpOPCITEMRESULT ( result ) ) {
            return;
        }

        // get the server handle array
        final Integer[] serverHandles = new Integer[itemArray.length];
        for ( int i = 0; i < itemArray.length; i++ ) {
            serverHandles[i] = new Integer ( result.get ( i ).getValue ().getServerHandle () );
        }

        // set them active
        System.out.println ( "Activate" );
        final ResultSet<Integer> resultSet = itemManagement.setActiveState ( true, serverHandles );
        for ( final Result<Integer> resultEntry : resultSet ) {
            System.out.println ( String.format ( "Item: %08X, Error: %08X", resultEntry.getValue (), resultEntry.getErrorCode () ) );
        }

        // set client handles
        System.out.println ( "Set client handles" );
        final Integer[] clientHandles = new Integer[serverHandles.length];
        for ( int i = 0; i < serverHandles.length; i++ ) {
            clientHandles[i] = i;
        }
        itemManagement.setClientHandles ( serverHandles, clientHandles );

        System.out.println ( "Create async IO 2.0 object" );
        // OPCAsyncIO2 asyncIO2 = group.getAsyncIO2 ();

        // connect handler
        System.out.println ( "attach" );
        final EventHandler eventHandler = group.attach ( new DumpDataCallback () );
        /*
        System.out.println ( "attach..enable" );
        asyncIO2.setEnable ( true );
        System.out.println ( "attach..refresh" );
        asyncIO2.refresh ( (short)1, 1 );
        */

        // sleep
        try {
            System.out.println ( "Waiting..." );
            Thread.sleep ( 10 * 1000 );
        }
        catch ( final InterruptedException e ) {
            // TODO Auto-generated catch block
            e.printStackTrace ();
        }
        eventHandler.detach ();

        // sync IO - read
        final OPCSyncIO syncIO = group.getSyncIO ();
        final KeyedResultSet<Integer, OPCITEMSTATE> itemState = syncIO.read ( OPCDATASOURCE.OPC_DS_DEVICE, serverHandles );
        for ( final KeyedResult<Integer, OPCITEMSTATE> itemStateEntry : itemState ) {
            final int errorCode = itemStateEntry.getErrorCode ();
            System.out.println ( String.format ( "Server ID: %08X, Value: %s, Timestamp: %d/%d (%Tc), Quality: %d, Error: %08X", itemStateEntry.getKey (), itemStateEntry.getValue ().getValue (), itemStateEntry.getValue ().getTimestamp ().getHigh (), itemStateEntry.getValue ().getTimestamp ().getLow (), itemStateEntry.getValue ().getTimestamp ().asCalendar (), itemStateEntry.getValue ().getQuality (), errorCode ) );
            if ( errorCode != 0 ) {
                showError ( server, errorCode );
            }
        }

        // set them inactive
        System.out.println ( "In-Active" );
        itemManagement.setActiveState ( false, serverHandles );

        // finally remove them again
        System.out.println ( "Remove" );
        itemManagement.remove ( serverHandles );
    }

    // Prints the server's status block (state, vendor, version, group count, times).
    public static void dumpServerStatus ( final OPCServer server ) throws JIException {
        final OPCSERVERSTATUS status = server.getStatus ();
        System.out.println ( "===== SERVER STATUS ======" );
        System.out.println ( "State: " + status.getServerState ().toString () );
        System.out.println ( "Vendor: " + status.getVendorInfo () );
        System.out.println ( String.format ( "Version: %d.%d.%d", status.getMajorVersion (), status.getMinorVersion (), status.getBuildNumber () ) );
        System.out.println ( "Groups: " + status.getGroupCount () );
        System.out.println ( "Bandwidth: " + status.getBandWidth () );
        System.out.println ( String.format ( "Start Time: %tc", status.getStartTime ().asCalendar () ) );
        System.out.println ( String.format ( "Current Time: %tc", status.getCurrentTime ().asCalendar () ) );
        System.out.println ( String.format ( "Last Update Time: %tc", status.getLastUpdateTime ().asCalendar () ) );
        System.out.println ( "===== SERVER STATUS ======" );
    }

    // Lists the group names the server reports for the given enumeration scope.
    public static void enumerateGroups ( final OPCServer server, final OPCENUMSCOPE scope ) throws IllegalArgumentException, UnknownHostException, JIException {
        System.out.println ( "Enum Groups: " + scope.toString () );
        for ( final String group : server.getGroups ( scope ).asCollection () ) {
            System.out.println ( "Group: " + group );
        }
    }

    /**
     * Entry point. args: [0]=host, [1]=domain/user/password triple as passed to
     * JISession.createSession (args[1..3]). Connects via ProgId, runs the status
     * dump, flat browse, item tests, optional writes and group enumeration, and
     * always destroys the DCOM session in the finally block.
     * NOTE(review): no argument-count check — fewer than 4 args throws
     * ArrayIndexOutOfBoundsException before any connection attempt.
     */
    @SuppressWarnings ( "unused" )
    public static void main ( final String[] args ) throws IllegalArgumentException, UnknownHostException, JIException {
        final TestConfiguration configuration = new MatrikonSimulationServerConfiguration ();
        OPCServer server = null;
        try {
            JISystem.setAutoRegisteration ( true );

            _session = JISession.createSession ( args[1], args[2], args[3] );

            // OPCServer server = new OPCServer ( "127.0.0.1", JIProgId.valueOf
            // ( session, "Matrikon.OPC.Simulation.1" ),
            // session );
            //JIComServer comServer = new JIComServer ( JIClsid.valueOf ( configuration.getCLSID () ), args[0], _session );
            final JIComServer comServer = new JIComServer ( JIProgId.valueOf ( configuration.getProgId () ), args[0], _session );
            final IJIComObject serverObject = comServer.createInstance ();
            server = new OPCServer ( serverObject );

            dumpServerStatus ( server );

            /*
            OPCCommon common = server.getCommon ();
            common.setLocaleID ( 1033 );
            System.out.println ( String.format ( "LCID: %d", common.getLocaleID () ) );
            common.setClientName ( "test" );
            for ( Integer i : common.queryAvailableLocaleIDs () ) {
                System.out.println ( String.format ( "Available LCID: %d", i ) );
            }
            */

            final OPCBrowseServerAddressSpace serverBrowser = server.getBrowser ();
            browseFlat ( serverBrowser );
            /*
            browseTree ( serverBrowser );
            */

            final OPCGroupStateMgt group = server.addGroup ( "test", true, 100, 1234, 60, 0.0f, 1033 );

            /*
            group.setName ( "test2" );
            OPCGroupStateMgt group2 = group.clone ( "test" );
            group = server.getGroupByName ( "test2" );
            group.setState ( null, false, null, null, null, null );
            group.setState ( null, true, null, null, null, null );
            dumpGroupState ( group );
            dumpGroupState ( group2 );
            */

            testItems ( server, group, configuration.getReadItems () );

            if ( configuration.getWriteItems () != null ) {
                writeItems ( server, group, configuration.getWriteItems () );
            }

            final OPCItemProperties itemProperties = server.getItemPropertiesService ();
            //dumpItemProperties ( itemProperties, "Saw-toothed Waves.Int" );

            final OPCItemIO itemIO = server.getItemIOService ();
            //queryItems ( itemIO, "Saw-toothed Waves.Int" );

            enumerateGroups ( server, OPCENUMSCOPE.OPC_ENUM_PUBLIC );
            enumerateGroups ( server, OPCENUMSCOPE.OPC_ENUM_PRIVATE );
            enumerateGroups ( server, OPCENUMSCOPE.OPC_ENUM_ALL );

            // clean up
            server.removeGroup ( group, true );
            //server.removeGroup ( group2, true );
        }
        catch ( final JIException e ) {
            e.printStackTrace ();
            // NOTE(review): if the failure happened before 'server' was assigned,
            // showError dereferences a null server and will NPE here.
            showError ( server, e.getErrorCode () );
        }
        finally {
            if ( _session != null ) {
                JISession.destroySession ( _session );
            }
            _session = null;
        }
    }
}
/* * Copyright 2020, Google LLC. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package anthos.samples.bankofanthos.ledgerwriter; import static anthos.samples.bankofanthos.ledgerwriter.ExceptionMessages.EXCEPTION_MESSAGE_DUPLICATE_TRANSACTION; import static anthos.samples.bankofanthos.ledgerwriter.ExceptionMessages.EXCEPTION_MESSAGE_INSUFFICIENT_BALANCE; import static anthos.samples.bankofanthos.ledgerwriter.ExceptionMessages.EXCEPTION_MESSAGE_WHEN_AUTHORIZATION_HEADER_NULL; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; import static org.mockito.MockitoAnnotations.initMocks; import com.auth0.jwt.JWTVerifier; import com.auth0.jwt.exceptions.JWTVerificationException; import com.auth0.jwt.interfaces.Claim; import com.auth0.jwt.interfaces.DecodedJWT; import io.micrometer.core.instrument.Clock; import io.micrometer.core.lang.Nullable; import io.micrometer.stackdriver.StackdriverConfig; import io.micrometer.stackdriver.StackdriverMeterRegistry; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestInfo; import org.mockito.Mock; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import 
org.springframework.transaction.CannotCreateTransactionException; import org.springframework.web.client.HttpServerErrorException; import org.springframework.web.client.ResourceAccessException; class LedgerWriterControllerTest { private LedgerWriterController ledgerWriterController; @Mock private TransactionValidator transactionValidator; @Mock private TransactionRepository transactionRepository; @Mock private JWTVerifier verifier; @Mock private Transaction transaction; @Mock private DecodedJWT jwt; @Mock private Claim claim; @Mock private Clock clock; private static final String VERSION = "v0.1.0"; private static final String LOCAL_ROUTING_NUM = "123456789"; private static final String NON_LOCAL_ROUTING_NUM = "987654321"; private static final String BALANCES_API_ADDR = "balancereader:8080"; private static final String AUTHED_ACCOUNT_NUM = "1234567890"; private static final String BEARER_TOKEN = "Bearer abc"; private static final String TOKEN = "abc"; private static final String EXCEPTION_MESSAGE = "Invalid variable"; private static final int SENDER_BALANCE = 40; private static final int LARGER_THAN_SENDER_BALANCE = 1000; private static final int SMALLER_THAN_SENDER_BALANCE = 10; @BeforeEach void setUp() { initMocks(this); StackdriverMeterRegistry meterRegistry = new StackdriverMeterRegistry(new StackdriverConfig() { @Override public boolean enabled() { return false; } @Override public String projectId() { return "test"; } @Override @Nullable public String get(String key) { return null; } }, clock); ledgerWriterController = new LedgerWriterController(verifier, meterRegistry, transactionRepository, transactionValidator, LOCAL_ROUTING_NUM, BALANCES_API_ADDR, VERSION); when(verifier.verify(TOKEN)).thenReturn(jwt); when(jwt.getClaim( LedgerWriterController.JWT_ACCOUNT_KEY)).thenReturn(claim); } @Test @DisplayName("Given version number in the environment, " + "return a ResponseEntity with the version number") void version() { // When final ResponseEntity actualResult = 
ledgerWriterController.version(); // Then assertNotNull(actualResult); assertEquals(VERSION, actualResult.getBody()); assertEquals(HttpStatus.OK, actualResult.getStatusCode()); } @Test @DisplayName("Given the server is serving requests, return HTTP Status 200") void readiness() { // When final ResponseEntity actualResult = ledgerWriterController.readiness(); // Then assertNotNull(actualResult); assertEquals(ledgerWriterController.READINESS_CODE, actualResult.getBody()); assertEquals(HttpStatus.OK, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is external, return HTTP Status 201") void addTransactionSuccessWhenDiffThanLocalRoutingNum(TestInfo testInfo) { // Given when(transaction.getFromRoutingNum()).thenReturn(NON_LOCAL_ROUTING_NUM); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); // When final ResponseEntity actualResult = ledgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(ledgerWriterController.READINESS_CODE, actualResult.getBody()); assertEquals(HttpStatus.CREATED, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is internal and the transaction amount == sender balance, " + "return HTTP Status 201") void addTransactionSuccessWhenAmountEqualToBalance(TestInfo testInfo) { // Given LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromRoutingNum()).thenReturn(AUTHED_ACCOUNT_NUM); when(transaction.getAmount()).thenReturn(SENDER_BALANCE); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doReturn(SENDER_BALANCE).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity actualResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); 
assertEquals(ledgerWriterController.READINESS_CODE, actualResult.getBody()); assertEquals(HttpStatus.CREATED, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is internal and the transaction amount < sender balance, " + "return HTTP Status 201") void addTransactionSuccessWhenAmountSmallerThanBalance(TestInfo testInfo) { // Given LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromRoutingNum()).thenReturn(AUTHED_ACCOUNT_NUM); when(transaction.getAmount()).thenReturn(SMALLER_THAN_SENDER_BALANCE); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doReturn(SENDER_BALANCE).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity actualResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(ledgerWriterController.READINESS_CODE, actualResult.getBody()); assertEquals(HttpStatus.CREATED, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is internal and the transaction amount > sender balance, " + "return HTTP Status 400") void addTransactionFailWhenWhenAmountLargerThanBalance(TestInfo testInfo) { // Given LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromAccountNum()).thenReturn(AUTHED_ACCOUNT_NUM); when(transaction.getAmount()).thenReturn(LARGER_THAN_SENDER_BALANCE); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doReturn(SENDER_BALANCE).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity actualResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals( 
EXCEPTION_MESSAGE_INSUFFICIENT_BALANCE, actualResult.getBody()); assertEquals(HttpStatus.BAD_REQUEST, actualResult.getStatusCode()); } @Test @DisplayName("Given JWT verifier cannot verify the given bearer token, " + "return HTTP Status 401") void addTransactionWhenJWTVerificationExceptionThrown() { // Given when(verifier.verify(TOKEN)).thenThrow( JWTVerificationException.class); // When final ResponseEntity actualResult = ledgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(ledgerWriterController.UNAUTHORIZED_CODE, actualResult.getBody()); assertEquals(HttpStatus.UNAUTHORIZED, actualResult.getStatusCode()); } @Test @DisplayName("Given exception thrown on validation, return HTTP Status 400") void addTransactionWhenIllegalArgumentExceptionThrown() { // Given when(claim.asString()).thenReturn(AUTHED_ACCOUNT_NUM); doThrow(new IllegalArgumentException(EXCEPTION_MESSAGE)). when(transactionValidator).validateTransaction( LOCAL_ROUTING_NUM, AUTHED_ACCOUNT_NUM, transaction); // When final ResponseEntity actualResult = ledgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(EXCEPTION_MESSAGE, actualResult.getBody()); assertEquals(HttpStatus.BAD_REQUEST, actualResult.getStatusCode()); } @Test @DisplayName("Given HTTP request 'Authorization' header is null, " + "return HTTP Status 400") void addTransactionWhenBearerTokenNull() { // When final ResponseEntity actualResult = ledgerWriterController.addTransaction( null, transaction); // Then assertNotNull(actualResult); assertEquals(EXCEPTION_MESSAGE_WHEN_AUTHORIZATION_HEADER_NULL, actualResult.getBody()); assertEquals(HttpStatus.BAD_REQUEST, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is internal, check available balance and the balance " + "reader throws an error, return HTTP Status 500") void addTransactionWhenResourceAccessExceptionThrown(TestInfo testInfo) { // Given 
LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromAccountNum()).thenReturn(AUTHED_ACCOUNT_NUM); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doThrow(new ResourceAccessException(EXCEPTION_MESSAGE)).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity actualResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(EXCEPTION_MESSAGE, actualResult.getBody()); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is external and the transaction cannot be saved to the " + "transaction repository, return HTTP Status 500") void addTransactionWhenCannotCreateTransactionExceptionExceptionThrown(TestInfo testInfo) { // Given when(transaction.getFromRoutingNum()).thenReturn(NON_LOCAL_ROUTING_NUM); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doThrow(new CannotCreateTransactionException(EXCEPTION_MESSAGE)).when( transactionRepository).save(transaction); // When final ResponseEntity actualResult = ledgerWriterController.addTransaction( TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(EXCEPTION_MESSAGE, actualResult.getBody()); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, actualResult.getStatusCode()); } @Test @DisplayName("Given the transaction is internal, check available balance and the balance " + "service returns 500, return HTTP Status 500") void addTransactionWhenHttpServerErrorExceptionThrown(TestInfo testInfo) { // Given LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromAccountNum()).thenReturn(AUTHED_ACCOUNT_NUM); 
when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doThrow(new HttpServerErrorException( HttpStatus.INTERNAL_SERVER_ERROR)).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity actualResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(actualResult); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR.toString(), actualResult.getBody()); assertEquals(HttpStatus.INTERNAL_SERVER_ERROR, actualResult.getStatusCode()); } @Test @DisplayName("When duplicate UUID transactions are sent, " + "second one is rejected with HTTP status 400") void addTransactionWhenDuplicateUuidExceptionThrown(TestInfo testInfo) { // Given LedgerWriterController spyLedgerWriterController = spy(ledgerWriterController); when(transaction.getFromRoutingNum()).thenReturn(LOCAL_ROUTING_NUM); when(transaction.getFromRoutingNum()).thenReturn(AUTHED_ACCOUNT_NUM); when(transaction.getAmount()).thenReturn(SMALLER_THAN_SENDER_BALANCE); when(transaction.getRequestUuid()).thenReturn(testInfo.getDisplayName()); doReturn(SENDER_BALANCE).when( spyLedgerWriterController).getAvailableBalance( TOKEN, AUTHED_ACCOUNT_NUM); // When final ResponseEntity originalResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); final ResponseEntity duplicateResult = spyLedgerWriterController.addTransaction( BEARER_TOKEN, transaction); // Then assertNotNull(originalResult); assertEquals(ledgerWriterController.READINESS_CODE, originalResult.getBody()); assertEquals(HttpStatus.CREATED, originalResult.getStatusCode()); assertNotNull(duplicateResult); assertEquals( EXCEPTION_MESSAGE_DUPLICATE_TRANSACTION, duplicateResult.getBody()); assertEquals(HttpStatus.BAD_REQUEST, duplicateResult.getStatusCode()); } }
/* * #%L * SCIFIO library for reading and converting scientific file formats. * %% * Copyright (C) 2011 - 2021 SCIFIO developers. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package io.scif; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.List; /** * Abstract superclass of all SCIFIO {@link io.scif.Format} implementations. * * @see io.scif.Format * @see io.scif.Metadata * @see io.scif.Parser * @see io.scif.Reader * @see io.scif.Writer * @see io.scif.Checker * @see io.scif.services.FormatService * @author Mark Hiner */ public abstract class AbstractFormat extends AbstractSCIFIOPlugin implements Format { // -- Fields -- /** Valid suffixes for this file format. 
*/ private String[] suffixes; private boolean enabled = true; // Class references to the components of this Format private Class<? extends Metadata> metadataClass; private Class<? extends Checker> checkerClass; private Class<? extends Parser> parserClass; private Class<? extends Reader> readerClass; private Class<? extends Writer> writerClass; // -- Constructor -- public AbstractFormat() { metadataClass = DefaultMetadata.class; checkerClass = DefaultChecker.class; parserClass = DefaultParser.class; readerClass = DefaultReader.class; writerClass = DefaultWriter.class; updateCustomClasses(); } // -- AbstractFormat Methods -- /** * Helper method to cache the suffix array for a format. Concrete format * classes should implement this method, returning an array of supported * suffixes. * * @return Valid suffixes for this file format. */ protected abstract String[] makeSuffixArray(); // -- Format API Methods -- @Override public String[] getSuffixes() { if (suffixes == null) { suffixes = makeSuffixArray(); } return suffixes; } @Override public void setEnabled(final boolean enabled) { this.enabled = enabled; } @Override public boolean isEnabled() { return enabled; } @Override public String getFormatName() { return getInfo().getName(); } @Override public Metadata createMetadata() throws FormatException { return createContextualObject(getMetadataClass()); } @Override public Checker createChecker() throws FormatException { return createContextualObject(getCheckerClass()); } @Override public Parser createParser() throws FormatException { return createContextualObject(getParserClass()); } @Override public Reader createReader() throws FormatException { return createContextualObject(getReaderClass()); } @Override public Writer createWriter() throws FormatException { return createContextualObject(getWriterClass()); } @Override public Class<? extends Metadata> getMetadataClass() { return metadataClass; } @Override public Class<? 
extends Checker> getCheckerClass() { return checkerClass; } @Override public Class<? extends Parser> getParserClass() { return parserClass; } @Override public Class<? extends Reader> getReaderClass() { return readerClass; } @Override public Class<? extends Writer> getWriterClass() { return writerClass; } // -- Helper Methods -- /* * Creates a SCIFIO component from its class. Also sets its context based on * this format's context. */ private <T extends HasFormat> T createContextualObject(final Class<T> c) throws FormatException { final T t = createObject(c); t.setContext(getContext()); // if we are creating a Default component, we need to // manually set its Format. if (DefaultComponent.class.isAssignableFrom(t.getClass())) { try { final java.lang.reflect.Field fmt = t.getClass().getDeclaredField( "format"); fmt.setAccessible(true); fmt.set(t, this); } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { throw new FormatException(// "Failed to populate DefaultComponent field", e); } } return t; } /* * Returns an instance of an object from its Class */ private <T extends HasFormat> T createObject(final Class<T> c) throws FormatException { try { return c.newInstance(); } catch (InstantiationException | IllegalAccessException e) { throw new FormatException(e); } } /* * Overrides the default classes with declared custom components. */ @SuppressWarnings("unchecked") private void updateCustomClasses() { for (final Class<?> c : buildClassList()) { if ((c.getModifiers() & Modifier.ABSTRACT) != 0) continue; if (Metadata.class.isAssignableFrom(c)) metadataClass = (Class<? extends Metadata>) c; else if (Checker.class.isAssignableFrom(c)) checkerClass = (Class<? extends Checker>) c; else if (Parser.class.isAssignableFrom(c)) parserClass = (Class<? extends Parser>) c; else if (Reader.class.isAssignableFrom(c)) readerClass = (Class<? extends Reader>) c; else if (Writer.class.isAssignableFrom(c)) writerClass = (Class<? 
extends Writer>) c; } } /* * Searches for all nested classes within this class and recursively adds * them to a complete class list. */ private List<Class<?>> buildClassList() { final Class<?>[] classes = this.getClass().getDeclaredClasses(); final List<Class<?>> classList = new ArrayList<>(); for (final Class<?> c : classes) { check(c, classList); } return classList; } /* * Recursive method to add a class, and all nested classes declared in that * class, to the provided list of classes. */ private void check(final Class<?> newClass, final List<Class<?>> classList) { classList.add(newClass); for (final Class<?> c : newClass.getDeclaredClasses()) check(c, classList); } }
package slacknotifications.teamcity;

import static org.junit.Assert.*;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

import jetbrains.buildServer.messages.Status;
import jetbrains.buildServer.serverSide.*;
import jetbrains.buildServer.serverSide.settings.ProjectSettingsManager;

import org.apache.http.ProtocolVersion;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
import org.junit.*;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.JDOMException;
import org.jdom.input.SAXBuilder;

import slacknotifications.SlackNotification;
import slacknotifications.SlackNotificationImpl;
import slacknotifications.teamcity.payload.SlackNotificationPayloadManager;
import slacknotifications.teamcity.payload.content.PostMessageResponse;
import slacknotifications.teamcity.settings.SlackNotificationMainSettings;
import slacknotifications.teamcity.settings.SlackNotificationProjectSettings;
import slacknotifications.teamcity.settings.SlackNotificationConfig;

/**
 * Tests for {@code SlackNotificationListener}: verifies that TeamCity build
 * lifecycle events (started, finished, interrupted, etc.) trigger — or
 * deliberately do not trigger — a Slack notification post, depending on which
 * {@code BuildState} flags are enabled for the configured notification.
 *
 * <p>The listener under test ({@code whl}) is wired against mocked TeamCity
 * server services; the notification itself is a Mockito spy over a real
 * {@code SlackNotificationImpl} whose HTTP client is stubbed to return a
 * canned 200 response, so {@code post()} invocations can be verified without
 * network access.
 */
public class SlackNotificationListenerTest {
	// Mocked TeamCity server services consumed by the listener.
	SBuildServer sBuildServer = mock(SBuildServer.class);
	BuildHistory buildHistory = mock(BuildHistory.class);
	ProjectManager projectManager = mock(ProjectManager.class);
	ProjectSettingsManager settings = mock(ProjectSettingsManager.class);
	SlackNotificationMainSettings configSettings = mock(SlackNotificationMainSettings.class);
	SlackNotificationPayloadManager manager = mock(SlackNotificationPayloadManager.class);
	// SlackNotificationPayload payload = new SlackNotificationPayloadDetailed(manager);
	// Real (non-mock) project settings; populated per-test via addNewSlackNotification.
	SlackNotificationProjectSettings projSettings;
	// The factory hands the listener its notification instance; stubbed in
	// setUp() to always return the spy so post() calls can be verified.
	SlackNotificationFactory factory = mock(SlackNotificationFactory.class);
	SlackNotification slacknotification = mock (SlackNotification.class);
	SlackNotification slackNotificationImpl;
	SlackNotification spySlackNotification;
	SFinishedBuild previousSuccessfulBuild = mock(SFinishedBuild.class);
	SFinishedBuild previousFailedBuild = mock(SFinishedBuild.class);
	List<SFinishedBuild> finishedSuccessfulBuilds = new ArrayList<SFinishedBuild>();
	List<SFinishedBuild> finishedFailedBuilds = new ArrayList<SFinishedBuild>();
	// Minimal fake build hierarchy: project -> build type -> running build.
	MockSBuildType sBuildType = new MockSBuildType("Test Build", "A Test Build", "bt1");
	MockSRunningBuild sRunningBuild = new MockSRunningBuild(sBuildType, "SubVersion", Status.NORMAL, "Running", "TestBuild01");
	MockSProject sProject = new MockSProject("Test Project", "A test project", "project1", "ATestProject", sBuildType);
	// Listener under test.
	SlackNotificationListener whl;

	// Runs both before AND after each test so a stale slack-config.xml from a
	// previous run can never leak into the next one.
	@After
	@Before
	public void deleteSlackConfigFile(){
		DeleteConfigFiles();
	}

	// Best-effort removal of the on-disk config file and its directory;
	// File.delete() return values are intentionally ignored (the files may
	// simply not exist yet).
	private void DeleteConfigFiles() {
		File outputFile = new File("slack", "slack-config.xml");
		outputFile.delete();

		File outputDir = new File("slack");
		outputDir.delete();
	}

	@BeforeClass
	public static void setUpBeforeClass() throws Exception {
	}

	@AfterClass
	public static void tearDownAfterClass() throws Exception {
	}

	/**
	 * Wires the listener against the mocked server: stubs an HTTP client with
	 * a canned 200 response, builds the real-notification spy, registers the
	 * listener, and stubs build history / project lookups.
	 */
	@Before
	public void setUp() throws Exception {
		HttpClient httpClient = mock(HttpClient.class);
		BasicHttpResponse response = new BasicHttpResponse(new BasicStatusLine(new ProtocolVersion("http", 1, 1), 200, ""));
		PostMessageResponse successfulResponse = new PostMessageResponse();
		successfulResponse.setOk(true);
		// NOTE(review): an error string on a response marked ok=true looks
		// contradictory — presumably the ok flag takes precedence when the
		// response is parsed; confirm against SlackNotificationImpl.
		successfulResponse.setError("channel_not_found");
		response.setEntity(new StringEntity(successfulResponse.toJson()));
		when(httpClient.execute(isA(HttpUriRequest.class))).thenReturn(response);
		slackNotificationImpl = new SlackNotificationImpl(httpClient, "");
		spySlackNotification = spy(slackNotificationImpl);
		whl = new SlackNotificationListener(sBuildServer, settings, configSettings, manager, factory);
		projSettings = new SlackNotificationProjectSettings();
		// Every notification the listener obtains is the spy, so post() can be
		// verified in the individual tests.
		when(factory.getSlackNotification()).thenReturn(spySlackNotification);
		//when(manager.isRegisteredFormat("JSON")).thenReturn(true);
		// when(manager.getFormat("JSON")).thenReturn(payload);
		//when(manager.getServer()).thenReturn(sBuildServer);
		when(sBuildServer.getProjectManager()).thenReturn(projectManager);
		when(projectManager.findProjectById("project1")).thenReturn(sProject);
		when(sBuildServer.getHistory()).thenReturn(buildHistory);
		when(sBuildServer.getRootUrl()).thenReturn("http://test.server");
		when(previousSuccessfulBuild.getBuildStatus()).thenReturn(Status.NORMAL);
		when(previousSuccessfulBuild.isPersonal()).thenReturn(false);
		when(previousFailedBuild.getBuildStatus()).thenReturn(Status.FAILURE);
		when(previousFailedBuild.isPersonal()).thenReturn(false);
		finishedSuccessfulBuilds.add(previousSuccessfulBuild);
		finishedFailedBuilds.add(previousFailedBuild);
		sBuildType.setProject(sProject);
		when(settings.getSettings(sRunningBuild.getProjectId(), "slackNotifications")).thenReturn(projSettings);
		whl.register();
	}

	@After
	public void tearDown() throws Exception {
	}

	// Smoke test: the constructor itself must not throw.
	@SuppressWarnings("unused")
	@Test
	public void testSlackNotificationListener() {
		SlackNotificationListener whl = new SlackNotificationListener(sBuildServer, settings,configSettings, manager, factory);
	}

	// register() must subscribe the listener with the build server.
	@Test
	public void testRegister() {
		SlackNotificationListener whl = new SlackNotificationListener(sBuildServer, settings,configSettings, manager, factory);
		whl.register();
		verify(sBuildServer).addListener(whl);
	}

	// getFromConfig() must copy proxy settings from the main XML config onto
	// the notification instance.
	@Test
	public void testGetFromConfig() {
		String expectedConfigDirectory = ".";
		ServerPaths serverPaths = mock(ServerPaths.class);
		when(serverPaths.getConfigDir()).thenReturn(expectedConfigDirectory);

		BuildState buildState = new BuildState();

		SlackNotificationMainSettings mainSettings = new SlackNotificationMainSettings(sBuildServer, serverPaths);
		mainSettings.readFrom(getFullConfigElement());
		SlackNotificationConfig config = new SlackNotificationConfig("", "#general", "teamName", "<default>", true, buildState, true, true, null, true, true, true, true);
		SlackNotificationListener whl = new SlackNotificationListener(sBuildServer, settings, mainSettings, manager, factory);
		whl.getFromConfig(slackNotificationImpl, config);

		assertEquals("myproxy.mycompany.com", slackNotificationImpl.getProxyHost());
		assertEquals(8080, slackNotificationImpl.getProxyPort());
	}

	// Loads the full sample configuration fixture used by testGetFromConfig.
	private Element getFullConfigElement(){
		return getElement("src/test/resources/main-config-full.xml");
	}

	// Parses the XML file at the given path and returns its root element, or
	// null if parsing fails (exceptions are printed, not propagated).
	private Element getElement(String filePath){
		SAXBuilder builder = new SAXBuilder();
		builder.setIgnoringElementContentWhitespace(true);
		try {
			Document doc = builder.build(filePath);
			return doc.getRootElement();
		} catch (JDOMException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} catch (IOException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		return null;
	}

	// buildStarted with an all-enabled state must post exactly once.
	@Test
	public void testBuildStartedSRunningBuild() throws FileNotFoundException, IOException {
		BuildState state = new BuildState().setAllEnabled();
		projSettings.addNewSlackNotification("", "project1", "my-channel", "myteam", "", true, state, true, true, new HashSet<String>(), true, true, true, true);
		when(slacknotification.isEnabled()).thenReturn(state.allEnabled());
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedSuccessfulBuilds);
		whl.buildStarted(sRunningBuild);
		verify(factory.getSlackNotification(), times(1)).post();
	}

	// buildFinished with an all-enabled state must post exactly once.
	@Test
	public void testBuildFinishedSRunningBuild() throws FileNotFoundException, IOException {
		BuildState state = new BuildState().setAllEnabled();
		projSettings.addNewSlackNotification("", "1234", "my-channel", "myteam", "", true, state , true, true, new HashSet<String>(), true, true, true, true);
		when(slacknotification.isEnabled()).thenReturn(state.allEnabled());
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedSuccessfulBuilds);
		whl.buildFinished(sRunningBuild);
		verify(factory.getSlackNotification(), times(1)).post();
	}

	// A success following a FAILED previous build counts as "build fixed",
	// so with BUILD_FIXED enabled the notification must post once.
	@Test
	public void testBuildFinishedSRunningBuildSuccessAfterFailure() throws FileNotFoundException, IOException {
		BuildState state = new BuildState();
		state.enable(BuildStateEnum.BUILD_FIXED);
		state.enable(BuildStateEnum.BUILD_FINISHED);
		state.enable(BuildStateEnum.BUILD_SUCCESSFUL);
		projSettings.addNewSlackNotification("", "1234", "my-channel", "myteam", null, true, state, true, true, new HashSet<String>(), true, true, true, true);
		when(slacknotification.isEnabled()).thenReturn(state.enabled(BuildStateEnum.BUILD_FIXED));
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedFailedBuilds);
		whl.buildFinished(sRunningBuild);
		verify(factory.getSlackNotification(), times(1)).post();
	}

	// A success following a SUCCESSFUL previous build is not a fix, so with
	// only BUILD_FIXED enabled nothing must be posted.
	@Test
	public void testBuildFinishedSRunningBuildSuccessAfterSuccess() throws FileNotFoundException, IOException {
		BuildState state = new BuildState();
		state.enable(BuildStateEnum.BUILD_FIXED);
		projSettings.addNewSlackNotification("", "1234", "my-channel", "myteam", null, true, state, true, true, new HashSet<String>(), true, true, true, true);
		when(slacknotification.isEnabled()).thenReturn(state.enabled(BuildStateEnum.BUILD_FIXED));
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedSuccessfulBuilds);
		whl.buildFinished(sRunningBuild);
		verify(factory.getSlackNotification(), times(0)).post();
	}

	// buildInterrupted with an all-enabled state must post exactly once.
	@Test
	public void testBuildInterruptedSRunningBuild() throws FileNotFoundException, IOException {
		BuildState state = new BuildState().setAllEnabled();
		projSettings.addNewSlackNotification("", "1234", "my-channel", "myteam", null, true, state, true, true, new HashSet<String>(), true, true, true, true);
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedSuccessfulBuilds);
		whl.buildInterrupted(sRunningBuild);
		verify(factory.getSlackNotification(), times(1)).post();
	}

	// beforeBuildFinish must post once when BEFORE_BUILD_FINISHED is enabled.
	@Test
	public void testBeforeBuildFinishSRunningBuild() throws FileNotFoundException, IOException {
		BuildState state = new BuildState();
		state.enable(BuildStateEnum.BEFORE_BUILD_FINISHED);
		projSettings.addNewSlackNotification("", "1234", "my-channel", "myteam", null, true, state, true, true, new HashSet<String>(), true, true, true, true);
		when(buildHistory.getEntriesBefore(sRunningBuild, false)).thenReturn(finishedSuccessfulBuilds);
		whl.beforeBuildFinish(sRunningBuild);
		verify(factory.getSlackNotification(), times(1)).post();
	}

	// buildChangedStatus is expected to be a no-op for notifications: no
	// project settings enable it here, so post() must never be called.
	@Test
	public void testBuildChangedStatusSRunningBuildStatusStatus() throws FileNotFoundException, IOException {
		MockSBuildType sBuildType = new MockSBuildType("Test Build", "A Test Build", "bt1");
		sBuildType.setProject(sProject);
		String triggeredBy = "SubVersion";
		MockSRunningBuild sRunningBuild = new MockSRunningBuild(sBuildType, triggeredBy, Status.NORMAL, "Running", "TestBuild01");
		when(settings.getSettings(sRunningBuild.getProjectId(), "slackNotifications")).thenReturn(projSettings);

		MockSProject sProject = new MockSProject("Test Project", "A test project", "project1", "ATestProject", sBuildType);
		sBuildType.setProject(sProject);
		SlackNotificationListener whl = new SlackNotificationListener(sBuildServer, settings,configSettings, manager, factory);
		Status oldStatus = Status.NORMAL;
		Status newStatus = Status.FAILURE;
		whl.register();
		whl.buildChangedStatus(sRunningBuild, oldStatus, newStatus);
		verify(factory.getSlackNotification(), times(0)).post();
	}

//	@Test
//	public void testResponsibleChangedSBuildTypeResponsibilityInfoResponsibilityInfoBoolean() {
//
//	}
}
/*
 * Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.util;

import com.hazelcast.internal.eviction.Expirable;
import com.hazelcast.internal.util.ThreadLocalRandomProvider;

import java.util.AbstractMap;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

/**
 * ConcurrentHashMap to extend iterator capability.
 *
 * @param <K> Type of the key
 * @param <V> Type of the value
 */
public class SampleableConcurrentHashMap<K, V> extends ConcurrentReferenceHashMap<K, V> {

    private static final float LOAD_FACTOR = 0.91f;

    public SampleableConcurrentHashMap(int initialCapacity) {
        // Concurrency level 1 is important for fetch-method to function properly.
        // Moreover partitions are single threaded and higher concurrency has not much gain
        this(initialCapacity, LOAD_FACTOR, 1, ReferenceType.STRONG, ReferenceType.STRONG, null);
    }

    public SampleableConcurrentHashMap(int initialCapacity, float loadFactor, int concurrencyLevel,
                                       ReferenceType keyType, ReferenceType valueType, EnumSet<Option> options) {
        super(initialCapacity, loadFactor, concurrencyLevel, keyType, valueType, options);
    }

    /**
     * Fetches keys from given <code>tableIndex</code> as <code>size</code>
     * and puts them into <code>keys</code> list.
     *
     * @param tableIndex Index (checkpoint) for starting point of fetch operation
     * @param size       Count of how many keys will be fetched
     * @param keys       List that fetched keys will be put into
     * @return the next index (checkpoint) for later fetches
     */
    public int fetchKeys(int tableIndex, int size, List<K> keys) {
        final long now = Clock.currentTimeMillis();
        // Relies on concurrency level 1 (single segment) — see constructor comment.
        final Segment<K, V> segment = segments[0];
        final HashEntry<K, V>[] currentTable = segment.table;
        int nextTableIndex;
        // Use the table snapshot consistently for the bounds check (original mixed
        // segment.table and currentTable, which are the same reference read twice).
        if (tableIndex >= 0 && tableIndex < currentTable.length) {
            nextTableIndex = tableIndex;
        } else {
            nextTableIndex = currentTable.length - 1;
        }
        int counter = 0;
        // Walk buckets backwards from the checkpoint, collecting at most `size` keys.
        while (nextTableIndex >= 0 && counter < size) {
            HashEntry<K, V> nextEntry = currentTable[nextTableIndex--];
            while (nextEntry != null) {
                if (nextEntry.key() != null) {
                    final V value = nextEntry.value();
                    if (isValidForFetching(value, now)) {
                        keys.add(nextEntry.key());
                        counter++;
                    }
                }
                nextEntry = nextEntry.next;
            }
        }
        return nextTableIndex;
    }

    /**
     * Fetches entries from given <code>tableIndex</code> as <code>size</code>
     * and puts them into <code>entries</code> list.
     *
     * @param tableIndex Index (checkpoint) for starting point of fetch operation
     * @param size       Count of how many entries will be fetched
     * @param entries    List that fetched entries will be put into
     * @return the next index (checkpoint) for later fetches
     */
    public int fetchEntries(int tableIndex, int size, List<Map.Entry<K, V>> entries) {
        final long now = Clock.currentTimeMillis();
        final Segment<K, V> segment = segments[0];
        final HashEntry<K, V>[] currentTable = segment.table;
        int nextTableIndex;
        if (tableIndex >= 0 && tableIndex < currentTable.length) {
            nextTableIndex = tableIndex;
        } else {
            nextTableIndex = currentTable.length - 1;
        }
        int counter = 0;
        while (nextTableIndex >= 0 && counter < size) {
            HashEntry<K, V> nextEntry = currentTable[nextTableIndex--];
            while (nextEntry != null) {
                if (nextEntry.key() != null) {
                    final V value = nextEntry.value();
                    if (isValidForFetching(value, now)) {
                        K key = nextEntry.key();
                        entries.add(new AbstractMap.SimpleEntry<K, V>(key, value));
                        counter++;
                    }
                }
                nextEntry = nextEntry.next;
            }
        }
        return nextTableIndex;
    }

    /**
     * Decides whether a value may be returned by the fetch methods.
     * {@link Expirable} values that are already expired at {@code now} are skipped.
     */
    protected boolean isValidForFetching(V value, long now) {
        if (value instanceof Expirable) {
            return !((Expirable) value).isExpiredAt(now);
        }
        return true;
    }

    /**
     * Entry to define keys and values for sampling.
     */
    public static class SamplingEntry<K, V> {

        protected final K key;
        protected final V value;

        public SamplingEntry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        public K getEntryKey() {
            return key;
        }

        public V getEntryValue() {
            return value;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof SamplingEntry)) {
                return false;
            }
            @SuppressWarnings("unchecked")
            SamplingEntry e = (SamplingEntry) o;
            return eq(key, e.key) && eq(value, e.value);
        }

        private static boolean eq(Object o1, Object o2) {
            return o1 == null ? o2 == null : o1.equals(o2);
        }

        @Override
        public int hashCode() {
            return (key == null ? 0 : key.hashCode())
                    ^ (value == null ? 0 : value.hashCode());
        }

        @Override
        public String toString() {
            return key + "=" + value;
        }
    }

    /**
     * Factory hook so subclasses can return richer sampling entries.
     */
    @SuppressWarnings("unchecked")
    protected <E extends SamplingEntry> E createSamplingEntry(K key, V value) {
        return (E) new SamplingEntry<K, V>(key, value);
    }

    /**
     * Gets and returns samples as <code>sampleCount</code>.
     *
     * @param sampleCount Count of samples
     * @return the sampled {@link SamplingEntry} list
     * @throws IllegalArgumentException if {@code sampleCount} is negative
     */
    public <E extends SamplingEntry> Iterable<E> getRandomSamples(int sampleCount) {
        if (sampleCount < 0) {
            throw new IllegalArgumentException("Sample count cannot be a negative value.");
        }
        if (sampleCount == 0 || size() == 0) {
            // Typed empty list instead of the raw Collections.EMPTY_LIST constant.
            return Collections.<E>emptyList();
        }
        return new LazySamplingEntryIterableIterator<E>(sampleCount);
    }

    /**
     * This class is implements both of "Iterable" and "Iterator" interfaces.
     * So we can use only one object (instead of two) both for "Iterable" and "Iterator" interfaces.
     *
     * NOTE: Assumed that it is not accessed by multiple threads. So there is no synchronization.
     */
    private final class LazySamplingEntryIterableIterator<E extends SamplingEntry>
            implements Iterable<E>, Iterator<E> {

        private final int maxEntryCount;
        private final int randomNumber;
        private final int firstSegmentIndex;
        private int currentSegmentIndex;
        private int currentBucketIndex;
        private HashEntry<K, V> currentEntry;
        private int returnedEntryCount;
        private boolean reachedToEnd;
        private E currentSample;

        private LazySamplingEntryIterableIterator(int maxEntryCount) {
            this.maxEntryCount = maxEntryCount;
            this.randomNumber = ThreadLocalRandomProvider.get().nextInt(Integer.MAX_VALUE);
            this.firstSegmentIndex = randomNumber % segments.length;
            this.currentSegmentIndex = firstSegmentIndex;
            this.currentBucketIndex = -1;
        }

        @Override
        public Iterator<E> iterator() {
            return this;
        }

        /**
         * Originally taken by Jaromir Hamala's implementation and changed as incremental implementation.
         * So kudos to Jaromir :)
         */
        //CHECKSTYLE:OFF
        private void iterate() {
            if (returnedEntryCount >= maxEntryCount || reachedToEnd) {
                currentSample = null;
                return;
            }
            do {
                Segment<K, V> segment = segments[currentSegmentIndex];
                if (segment != null) {
                    HashEntry<K, V>[] table = segment.table;
                    // Pick up a starting point
                    int firstBucketIndex = randomNumber % table.length;
                    // If current bucket index is not initialized yet, initialize it with starting point
                    if (currentBucketIndex == -1) {
                        currentBucketIndex = firstBucketIndex;
                    }
                    do {
                        // If current entry is not initialized yet, initialize it
                        if (currentEntry == null) {
                            currentEntry = table[currentBucketIndex];
                        }
                        while (currentEntry != null) {
                            V value = currentEntry.value();
                            K key = currentEntry.key();
                            // Advance to next entry
                            currentEntry = currentEntry.next;
                            if (isValidForSampling(value)) {
                                currentSample = createSamplingEntry(key, value);
                                // If we reached end of entries, advance current bucket index
                                if (currentEntry == null) {
                                    currentBucketIndex = ++currentBucketIndex < table.length ? currentBucketIndex : 0;
                                }
                                returnedEntryCount++;
                                return;
                            }
                        }
                        // Advance current bucket index
                        currentBucketIndex = ++currentBucketIndex < table.length ? currentBucketIndex : 0;
                        // Clear current entry index to initialize at next bucket
                        currentEntry = null;
                    } while (currentBucketIndex != firstBucketIndex);
                }
                // Advance current segment index
                currentSegmentIndex = ++currentSegmentIndex < segments.length ? currentSegmentIndex : 0;
                // Clear current bucket index to initialize at next segment
                currentBucketIndex = -1;
                // Clear current entry index to initialize at next segment
                currentEntry = null;
            } while (currentSegmentIndex != firstSegmentIndex);
            reachedToEnd = true;
            currentSample = null;
        }
        //CHECKSTYLE:ON

        @Override
        public boolean hasNext() {
            // Only advance when the previously produced sample has been consumed.
            // The original advanced unconditionally, so two consecutive hasNext()
            // calls silently skipped a sample.
            if (currentSample == null) {
                iterate();
            }
            return currentSample != null;
        }

        @Override
        public E next() {
            // Allow next() without a preceding hasNext(), per the Iterator contract.
            if (currentSample == null) {
                iterate();
            }
            if (currentSample == null) {
                throw new NoSuchElementException();
            }
            // Consume the sample so repeated next() calls do not return the same
            // element twice (the original never cleared currentSample here).
            E sample = currentSample;
            currentSample = null;
            return sample;
        }

        @Override
        public void remove() {
            throw new UnsupportedOperationException("Removing is not supported");
        }
    }

    /**
     * Decides whether a value may be produced as a random sample.
     * Entries whose value reference has been cleared are skipped.
     */
    protected boolean isValidForSampling(V value) {
        return value != null;
    }
}
package org.pescuma.gitstats;

import static java.lang.Math.*;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.logging.LogManager;

import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.pescuma.datatable.DataTable;
import org.pescuma.datatable.DataTable.Line;
import org.pescuma.datatable.DataTableSerialization;
import org.pescuma.datatable.MemoryDataTable;
import org.pescuma.datatable.func.Function2;
import org.pescuma.gitstats.ColumnsOutput.Align;

import com.google.common.base.Function;
import com.google.common.base.Predicate;
import com.google.common.io.Files;

/**
 * Command-line entry point: gathers blame/commit statistics from git repositories
 * (or previously exported CSV files) and writes them to console, CSV or HTML.
 */
public class Main {

    public static void main(String[] args) throws IOException, GitAPIException, InterruptedException {
        disableLogger();

        Args parsedArgs = new Args();
        CmdLineParser parser = new CmdLineParser(parsedArgs);
        try {
            parser.parseArgument(args);
        } catch (CmdLineException e) {
            System.out.println(e.getMessage());
            parser.printUsage(System.out);
            System.out.println();
            System.exit(-1);
        }

        if (parsedArgs.showHelp) {
            System.out.println("git stats [options...] arguments...");
            parser.printUsage(System.out);
            System.out.println();
            return;
        }

        System.exit(run(parsedArgs));
    }

    // JGit and other libs log through java.util.logging; silence them for clean console output.
    private static void disableLogger() {
        LogManager.getLogManager().reset();
    }

    /**
     * Collects data from all configured paths and writes every requested output.
     *
     * @return process exit code: 0 on success, -1 when no data was gathered
     */
    private static int run(Args args) throws IOException, GitAPIException, InterruptedException {
        args.applyDefaults();

        DataTable data = new MemoryDataTable();

        for (File path : args.paths) {
            if (!path.exists())
                System.err.println("File/folder not found: " + path);
            else if (path.isFile() && path.getName().endsWith(".csv"))
                loadFromCSV(data, args, path);
            else
                RepositoryProcessor.process(data, args, path);
        }
        System.out.println();

        if (data.isEmpty()) {
            System.out.println("No data available");
            return -1;
        }

        for (String output : args.outputs) {
            if (Args.isConsole(output))
                outputStatsToConsole(data, args);
            else if (output.endsWith(".csv"))
                outputStatsToCSV(data, output);
            else if (output.endsWith(".htm") || output.endsWith(".html"))
                outputStatsToHTML(data, output);
            else
                System.out.println("Unknown output format: " + output);
        }

        return 0;
    }

    /**
     * Loads a previously exported CSV and applies the same filters/mappings
     * (excluded paths, ignored revisions, author and language mappings) that a
     * live repository scan would apply.
     */
    private static void loadFromCSV(DataTable data, final Args args, File file) {
        System.out.println("Loading " + file.getAbsolutePath() + "...");

        DataTable loaded = new MemoryDataTable();
        DataTableSerialization.loadFromCSV(loaded, file);

        if (!args.excludedPaths.isEmpty()) {
            final List<String> excludedPaths = preProcessExcludedPaths(args);
            loaded = loaded.filter(Consts.COL_FILE, new Predicate<String>() {
                @Override
                public boolean apply(String file) {
                    for (String excluded : excludedPaths) {
                        if (file.startsWith(excluded))
                            return false;
                    }
                    return true;
                }
            });
        }

        if (!args.ignoredRevisions.isEmpty()) {
            loaded = loaded.filter(Consts.COL_COMMIT, new Predicate<String>() {
                @Override
                public boolean apply(String commit) {
                    // Revisions may be abbreviated hashes, hence prefix matching.
                    for (String rev : args.ignoredRevisions) {
                        if (StringUtils.startsWithIgnoreCase(commit, rev))
                            return false;
                    }
                    return true;
                }
            });
        }

        if (!args.authors.isEmpty()) {
            final Map<String, String> authorMappings = args.getAuthorMappings();
            loaded = loaded.mapColumn(Consts.COL_AUTHOR, new Function<String, String>() {
                @Override
                public String apply(String author) {
                    String newAuthor = authorMappings.get(author);
                    if (newAuthor != null)
                        return newAuthor;
                    else
                        return author;
                }
            });
        }

        if (!args.languages.isEmpty()) {
            final Map<String, String> languageMappings = args.getLanguageMappings();
            loaded = loaded.mapColumn(Consts.COL_LANGUAGE, new Function2<String, String, Line>() {
                @Override
                public String apply(String language, Line line) {
                    // Language overrides are keyed by file extension, not by language name.
                    String extension = FilenameUtils.getExtension(line.getColumn(Consts.COL_FILE));
                    String newLanguage = languageMappings.get(extension);
                    if (newLanguage != null)
                        return newLanguage;
                    else
                        return language;
                }
            });
        }

        data.inc(loaded);
    }

    private static void outputStatsToCSV(DataTable data, String output) {
        System.out.println("Writing CSV output to " + output);

        DataTableSerialization.saveAsCSV(data, new File(output), false);

        System.out.println();
    }

    /**
     * Renders the data into the bundled index.html template by replacing the
     * $$$date$$$ / $$$version$$$ / $$$data$$$ placeholders.
     */
    private static void outputStatsToHTML(DataTable data, String output) throws IOException {
        System.out.println("Writing HTML output to " + output);

        StringBuilder lines = new StringBuilder();
        for (Line line : data.getLines()) {
            lines.append("            data.add(").append(line.getValue());
            for (String col : line.getColumns())
                lines.append(", '").append(col.replace("'", "\\'")).append("'");
            lines.append(");\n");
        }

        String html = readIndexHtml();
        html = html.replace("$$$date$$$", DateFormat.getDateTimeInstance().format(new Date()));
        html = html.replace("$$$version$$$", getVersion());
        html = html.replace("$$$data$$$", lines.toString());

        Files.write(html, new File(output), Charset.forName("UTF-8"));

        System.out.println();
    }

    private static String getVersion() {
        // The manifest version is only present when running from a built jar.
        String version = Main.class.getPackage().getImplementationVersion();
        if (version == null)
            version = "devel";
        return version;
    }

    private static String readIndexHtml() throws IOException {
        InputStream in = Main.class.getResourceAsStream("/org/pescuma/gitstats/export/index.html");
        // getResourceAsStream returns null when the resource is missing; the original
        // would then NPE both in toString() and in the finally block.
        if (in == null)
            throw new IOException("Missing resource: /org/pescuma/gitstats/export/index.html");
        try {
            return IOUtils.toString(in, "UTF-8");
        } finally {
            in.close();
        }
    }

    /**
     * Prints the Total / Authors / Months / Languages report sections to stdout.
     */
    private static void outputStatsToConsole(DataTable data, final Args args) {
        double totalLines = data.sum();

        System.out.println("Total:");
        ColumnsOutput out = new ColumnsOutput();
        {
            out.appendColumn("   ");
            appendLines(out, data);
            appendFiles(out, data);
            appendLanguages(out, data);
            appendCommits(out, data);
            appendAuthors(out, data);
            appendMonths(out, data);
            appendUnblamable(out, data);
        }
        out.print(System.out);
        System.out.println();

        System.out.println("Authors:");
        out = new ColumnsOutput();
        for (String author : sortByLines(data, Consts.COL_AUTHOR)) {
            // The empty author marks unblamable lines; reported separately below.
            if (author.isEmpty())
                continue;

            DataTable authorData = data.filter(Consts.COL_AUTHOR, author);
            double authorLines = authorData.sum();

            out.appendColumn("   ").appendColumn(author).appendColumn(" : ")
                    .appendColumn(Align.Right, "%.1f%%", percent(authorLines, totalLines))
                    .appendColumn(" of the lines: ");
            appendLines(out, authorData, authorLines);
            appendFiles(out, authorData);
            appendLanguages(out, authorData);
            appendCommits(out, authorData);
            appendMonths(out, authorData);
            out.newLine();
        }
        {
            DataTable unblamableData = data.filter(Consts.COL_AUTHOR, "");
            double unblamableLines = unblamableData.sum();
            if (unblamableLines > 0) {
                out.appendColumn("   ").appendColumn("Unblamable lines").appendColumn(" : ")
                        .appendColumn(Align.Right, "%.1f%%", percent(unblamableLines, totalLines))
                        .appendColumn(" of the lines: ");
                appendLines(out, unblamableData, unblamableLines);
                appendFiles(out, unblamableData);
                appendLanguages(out, unblamableData);
                out.newLine();
            }
        }
        out.print(System.out);
        System.out.println();

        System.out.print("Months: ");
        out = new ColumnsOutput();
        List<Double> perMonthLines = new ArrayList<Double>();
        for (String month : sortByText(data.getDistinct(Consts.COL_MONTH))) {
            if (month.isEmpty()) {
                perMonthLines.add(0d);
                continue;
            }

            DataTable monthData = data.filter(Consts.COL_MONTH, month);
            double monthLines = monthData.sum();
            perMonthLines.add(monthLines);

            out.appendColumn("   ").appendColumn(month).appendColumn(" : ");
            appendLines(out, monthData, monthLines);
            appendFiles(out, monthData);
            appendLanguages(out, monthData);
            appendCommits(out, monthData);
            appendAuthors(out, monthData);
            out.newLine();
        }
        System.out.println(Sparkline.getSparkline(perMonthLines));
        out.print(System.out);
        System.out.println();

        System.out.println("Languages:");
        out = new ColumnsOutput();
        for (String language : sortByLines(data, Consts.COL_LANGUAGE)) {
            DataTable languageData = data.filter(Consts.COL_LANGUAGE, language);

            out.appendColumn("   ").appendColumn(language).appendColumn(" : ");
            appendLines(out, languageData);
            appendFiles(out, languageData);
            appendCommits(out, languageData);
            appendAuthors(out, languageData);
            appendMonths(out, languageData);
            appendUnblamable(out, languageData);
            out.newLine();
        }
        out.print(System.out);
        System.out.println();
    }

    private static void appendLanguages(ColumnsOutput out, DataTable data) {
        out.appendColumn(" in ").appendColumn(data.getDistinct(Consts.COL_LANGUAGE).size()).appendColumn(" languages");
    }

    private static void appendFiles(ColumnsOutput out, DataTable data) {
        out.appendColumn(" in ").appendColumn(data.getDistinct(Consts.COL_FILE).size()).appendColumn(" files");
    }

    private static void appendLines(ColumnsOutput out, DataTable data) {
        appendLines(out, data, data.sum());
    }

    private static void appendLines(ColumnsOutput out, DataTable data, double total) {
        out.appendColumn((int) total).appendColumn(" lines (")
                .appendColumn((int) data.filter(Consts.COL_LINE_TYPE, Consts.CODE).sum()).appendColumn(" code, ")
                .appendColumn((int) data.filter(Consts.COL_LINE_TYPE, Consts.COMMENT).sum()).appendColumn(" comment, ")
                .appendColumn((int) data.filter(Consts.COL_LINE_TYPE, Consts.EMPTY).sum()).appendColumn(" empty)");
    }

    private static void appendCommits(ColumnsOutput out, DataTable data) {
        out.appendColumn(" in ").appendColumn(data.getDistinct(Consts.COL_COMMIT).size()).appendColumn(" commits");
    }

    private static void appendAuthors(ColumnsOutput out, DataTable data) {
        out.appendColumn(" by ").appendColumn(data.getDistinct(Consts.COL_AUTHOR).size()).appendColumn(" authors");
    }

    private static void appendMonths(ColumnsOutput out, DataTable data) {
        String[] months = getMonthRange(data);
        out.appendColumn(" from ").appendColumn(months[0]).appendColumn(" to ").appendColumn(months[1]);
    }

    private static void appendUnblamable(ColumnsOutput out, DataTable data) {
        long unblamable = round(data.filter(Consts.COL_AUTHOR, "").sum());
        if (unblamable > 0)
            // Fixed typo in output: was "umblamable".
            out.appendColumn(" (").appendColumn((int) unblamable).appendColumn(" unblamable)");
    }

    private static double percent(double count, double total) {
        return count * 100 / total;
    }

    /**
     * @return a two-element array {firstMonth, lastMonth}; {"unknown","unknown"}
     *         when no month information is available
     */
    private static String[] getMonthRange(DataTable authorData) {
        List<String> result = new ArrayList<String>(authorData.getDistinct(Consts.COL_MONTH));
        result.remove("");
        Collections.sort(result);

        if (result.size() < 1)
            return new String[] { "unknown", "unknown" };
        else
            return new String[] { result.get(0), result.get(result.size() - 1) };
    }

    private static List<String> sortByText(Collection<String> data) {
        List<String> result = new ArrayList<String>(data);
        Collections.sort(result);
        return result;
    }

    /**
     * @return the distinct values of {@code col}, sorted by total line count descending
     */
    private static List<String> sortByLines(DataTable data, int col) {
        final DataTable authorAndLines = data.groupBy(col);

        List<String> sorted = new ArrayList<String>(authorAndLines.getColumn(0));
        Collections.sort(sorted, new Comparator<String>() {
            @Override
            public int compare(String o1, String o2) {
                // Descending order. The original cast the double difference to int,
                // which truncates fractional differences to 0 and can misorder entries.
                return Double.compare(authorAndLines.get(o2), authorAndLines.get(o1));
            }
        });
        return sorted;
    }

    private static List<String> preProcessExcludedPaths(Args args) {
        List<String> result = new ArrayList<String>();
        for (String path : args.excludedPaths)
            result.add(normalizePath(path));
        return result;
    }

    // Normalizes to forward slashes, no leading slash, trailing slash, so that
    // simple startsWith() prefix matching works against stored file paths.
    private static String normalizePath(String path) {
        String result = path.replace('\\', '/');
        if (result.startsWith("/"))
            result = result.substring(1);
        if (!result.endsWith("/"))
            result += "/";
        return result;
    }
}
/*
 * Copyright 2010, 2011 sshj contributors, Cyril Ledru
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.schmizz.sshj.xfer.scp;

import net.schmizz.sshj.common.IOUtils;
import net.schmizz.sshj.xfer.LocalDestFile;
import net.schmizz.sshj.xfer.scp.SCPEngine.Arg;

import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

/** Support for downloading files over a connected link using SCP. */
public final class SCPDownloadClient {

    // Whether remote directories are copied recursively; on by default.
    private boolean recursiveMode = true;

    private final SCPEngine engine;

    SCPDownloadClient(SCPEngine engine) {
        this.engine = engine;
    }

    /** Download a file from {@code sourcePath} on the connected host to {@code targetPath} locally. */
    public synchronized int copy(String sourcePath, LocalDestFile targetFile)
            throws IOException {
        engine.cleanSlate();
        try {
            startCopy(sourcePath, targetFile);
        } finally {
            // Always tear down the remote SCP process, even on failure.
            engine.exit();
        }
        return engine.getExitStatus();
    }

    public boolean getRecursiveMode() {
        return recursiveMode;
    }

    public void setRecursiveMode(boolean recursive) {
        this.recursiveMode = recursive;
    }

    /**
     * Launches the remote `scp` in source mode and drives the message loop,
     * dispatching each protocol message until the remote sends an empty one.
     */
    void startCopy(String sourcePath, LocalDestFile targetFile)
            throws IOException {
        List<Arg> args = new LinkedList<Arg>();
        args.add(Arg.SOURCE);
        args.add(Arg.QUIET);
        args.add(Arg.PRESERVE_TIMES);
        if (recursiveMode)
            args.add(Arg.RECURSIVE);
        engine.execSCPWith(args, sourcePath);

        engine.signal("Start status OK");

        String msg = engine.readMessage();
        do
            process(null, msg, targetFile);
        while (!(msg = engine.readMessage()).isEmpty());
    }

    /**
     * Parses {@code longString} as a long; {@code valType} names the field for
     * the error message (e.g. "length", "last modified time").
     */
    private long parseLong(String longString, String valType)
            throws SCPException {
        try {
            return Long.parseLong(longString);
        } catch (NumberFormatException nfe) {
            throw new SCPException("Could not parse " + valType + " from `" + longString + "`", nfe);
        }
    }

    /* e.g. "C0644" -> 0644; "D0755" -> 0755 */
    private int parsePermissions(String cmd)
            throws SCPException {
        // Expects exactly a mode letter followed by four octal digits.
        if (cmd.length() != 5)
            throw new SCPException("Could not parse permissions from `" + cmd + "`");
        return Integer.parseInt(cmd.substring(1), 8);
    }

    /**
     * Dispatches one SCP protocol message.
     *
     * @param bufferedTMsg a preceding 'T' (timestamps) message to apply to the
     *                     next file/dir, or null
     * @param msg          the message to process; first char selects the action:
     *                     'T' timestamps, 'C' file, 'D' directory start, 'E' directory end,
     *                     \1/\2 remote error
     * @param f            local destination for whatever the message describes
     * @return true when an 'E' (end of directory) message was processed
     */
    private boolean process(String bufferedTMsg, String msg, LocalDestFile f)
            throws IOException {
        if (msg.length() < 1)
            throw new SCPException("Could not parse message `" + msg + "`");

        switch (msg.charAt(0)) {

            case 'T':
                engine.signal("ACK: T");
                // The T message only carries times; recurse to handle the C/D that follows it.
                process(msg, engine.readMessage(), f);
                break;

            case 'C':
                processFile(msg, bufferedTMsg, f);
                break;

            case 'D':
                processDirectory(msg, bufferedTMsg, f);
                break;

            case 'E':
                return true;

            case (char) 1:
            case (char) 2:
                throw new SCPException("Remote SCP command returned error: " + msg.substring(1));

            default:
                final String err = "Unrecognized message: `" + msg + "`";
                // Report the error to the remote side before failing locally.
                engine.sendMessage((char) 2 + err);
                throw new SCPException(err);
        }

        return false;
    }

    /**
     * Handles a 'D' message: creates/enters the local directory and processes
     * contained messages until the matching 'E' arrives, then applies
     * permissions and (if {@code tMsg} is present) timestamps.
     */
    private void processDirectory(String dMsg, String tMsg, LocalDestFile f)
            throws IOException {
        final List<String> dMsgParts = tokenize(dMsg, 3, true); // D<perms> 0 <dirname>
        final long length = parseLong(dMsgParts.get(1), "dir length");
        final String dirname = dMsgParts.get(2);
        // The length field of a directory message is defined to be 0.
        if (length != 0)
            throw new IOException("Remote SCP command sent strange directory length: " + length);

        engine.startedDir(dirname);
        {
            f = f.getTargetDirectory(dirname);
            engine.signal("ACK: D");
            // Consume child messages until process() sees the matching 'E'.
            do {
            } while (!process(null, engine.readMessage(), f));
            setAttributes(f, parsePermissions(dMsgParts.get(0)), tMsg);
            engine.signal("ACK: E");
        }
        engine.finishedDir();
    }

    /**
     * Handles a 'C' message: streams {@code length} bytes from the remote into
     * the local target file, then applies permissions and optional timestamps.
     */
    private void processFile(String cMsg, String tMsg, LocalDestFile f)
            throws IOException {
        final List<String> cMsgParts = tokenize(cMsg, 3, true); // C<perms> <size> <filename>
        final long length = parseLong(cMsgParts.get(1), "length");
        final String filename = cMsgParts.get(2);

        engine.startedFile(filename, length);
        {
            f = f.getTargetFile(filename);
            engine.signal("Remote can start transfer");
            final OutputStream dest = f.getOutputStream();
            try {
                engine.transferFromRemote(dest, length);
            } finally {
                IOUtils.closeQuietly(dest);
            }
            engine.check("Remote agrees transfer done");
            setAttributes(f, parsePermissions(cMsgParts.get(0)), tMsg);
            engine.signal("Transfer done");
        }
        engine.finishedFile();
    }

    /**
     * Applies permissions and, when a 'T' message accompanied the entry,
     * its modification and access times.
     */
    private void setAttributes(LocalDestFile f, int perms, String tMsg)
            throws IOException {
        f.setPermissions(perms);
        if (tMsg != null) {
            List<String> tMsgParts = tokenize(tMsg, 4, false); // e.g. T<mtime> 0 <atime> 0
            // substring(1) strips the leading 'T' from the first token.
            f.setLastModifiedTime(parseLong(tMsgParts.get(0).substring(1), "last modified time"));
            f.setLastAccessedTime(parseLong(tMsgParts.get(2), "last access time"));
        }
    }

    /**
     * Splits {@code msg} on spaces into {@code totalParts} tokens. When
     * {@code consolidateTail} is true, any extra trailing tokens are re-joined
     * into the last part (filenames may themselves contain spaces).
     */
    private static List<String> tokenize(String msg, int totalParts, boolean consolidateTail)
            throws IOException {
        List<String> parts = Arrays.asList(msg.split(" "));
        if (parts.size() < totalParts ||
                (!consolidateTail && parts.size() != totalParts))
            throw new IOException("Could not parse message received from remote SCP: " + msg);

        if (consolidateTail && totalParts < parts.size()) {
            final StringBuilder sb = new StringBuilder(parts.get(totalParts - 1));
            for (int i = totalParts; i < parts.size(); i++) {
                sb.append(" ").append(parts.get(i));
            }
            parts = new ArrayList<String>(parts.subList(0, totalParts - 1));
            parts.add(sb.toString());
        }
        return parts;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.tests.integration.amqp;

import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.ANONYMOUS_RELAY;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.CONNECTION_OPEN_FAILED;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.CONTAINER_ID;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.DELAYED_DELIVERY;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.INVALID_FIELD;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.PRODUCT;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.VERSION;
import static org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport.contains;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.apache.activemq.artemis.protocol.amqp.proton.AmqpSupport;
import org.apache.activemq.artemis.spi.core.protocol.RemotingConnection;
import org.apache.activemq.artemis.tests.util.Wait;
import org.apache.activemq.artemis.utils.VersionLoader;
import org.apache.activemq.transport.amqp.client.AmqpClient;
import org.apache.activemq.transport.amqp.client.AmqpConnection;
import org.apache.activemq.transport.amqp.client.AmqpValidator;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.transport.AmqpError;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.engine.Connection;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests for behaviors expected of the broker when clients connect to the broker
 */
public class AmqpInboundConnectionTest extends AmqpClientTestSupport {

    private static final Logger LOG = LoggerFactory.getLogger(AmqpInboundConnectionTest.class);

    private static final String BROKER_NAME = "localhost";
    private static final String PRODUCT_NAME = "apache-activemq-artemis";

    // Broker must send a Close performative (connection-forced) when it drops a client.
    @Test(timeout = 60000)
    public void testCloseIsSentOnConnectionClose() throws Exception {
        AmqpClient client = createAmqpClient();
        AmqpConnection amqpConnection = client.connect();
        try {
            for (RemotingConnection connection : server.getRemotingService().getConnections()) {
                server.getRemotingService().removeConnection(connection);
                connection.disconnect(true);
            }

            Wait.assertTrue(amqpConnection::isClosed);
            assertEquals(AmqpSupport.CONNECTION_FORCED, amqpConnection.getConnection().getRemoteCondition().getCondition());
        } finally {
            amqpConnection.close();
        }
    }

    // Broker's Open frame must carry the expected container-id.
    @Test(timeout = 60000)
    public void testBrokerContainerId() throws Exception {
        AmqpClient client = createAmqpClient();
        assertNotNull(client);

        client.setValidator(new AmqpValidator() {

            @Override
            public void inspectOpenedResource(Connection connection) {
                if (!BROKER_NAME.equals(connection.getRemoteContainer())) {
                    markAsInvalid("Broker did not send the expected container ID");
                }
            }
        });

        AmqpConnection connection = addConnection(client.connect());
        try {
            assertNotNull(connection);
            connection.getStateInspector().assertValid();
        } finally {
            connection.close();
        }
    }

    // Broker must advertise its default max-frame-size on Open.
    @Test(timeout = 60000)
    public void testDefaultMaxFrameSize() throws Exception {
        AmqpClient client = createAmqpClient();
        assertNotNull(client);

        client.setValidator(new AmqpValidator() {

            @Override
            public void inspectOpenedResource(Connection connection) {
                int brokerMaxFrameSize = connection.getTransport().getRemoteMaxFrameSize();
                if (brokerMaxFrameSize != AmqpSupport.MAX_FRAME_SIZE_DEFAULT) {
                    markAsInvalid("Broker did not send the expected max Frame Size");
                }
            }
        });

        AmqpConnection connection = addConnection(client.connect());
        try {
            assertNotNull(connection);
            connection.getStateInspector().assertValid();
        } finally {
            connection.close();
        }
    }

    // Broker's Open properties must include the product name and its actual version.
    @Test(timeout = 60000)
    public void testBrokerConnectionProperties() throws Exception {
        AmqpClient client = createAmqpClient();

        client.setValidator(new AmqpValidator() {

            @Override
            public void inspectOpenedResource(Connection connection) {
                Map<Symbol, Object> properties = connection.getRemoteProperties();
                if (!properties.containsKey(PRODUCT)) {
                    markAsInvalid("Broker did not send a queue product name value");
                    return;
                }

                if (!properties.containsKey(VERSION)) {
                    markAsInvalid("Broker did not send a queue version value");
                    return;
                }

                if (!PRODUCT_NAME.equals(properties.get(PRODUCT))) {
                    markAsInvalid("Broker did not send a the expected product name");
                    return;
                }

                String brokerVersion = VersionLoader.getVersion().getFullVersion();
                if (!brokerVersion.equals(properties.get(VERSION))) {
                    markAsInvalid("Broker did not send a the expected product version");
                    return;
                }
            }
        });

        AmqpConnection connection = addConnection(client.connect());
        try {
            assertNotNull(connection);
            connection.getStateInspector().assertValid();
        } finally {
            connection.close();
        }
    }

    // Broker must offer the anonymous-relay and delayed-delivery capabilities.
    @Test(timeout = 60000)
    public void testConnectionCarriesExpectedCapabilities() throws Exception {
        AmqpClient client = createAmqpClient();
        assertNotNull(client);

        client.setValidator(new AmqpValidator() {

            @Override
            public void inspectOpenedResource(Connection connection) {
                Symbol[] offered = connection.getRemoteOfferedCapabilities();
                if (!contains(offered, ANONYMOUS_RELAY)) {
                    markAsInvalid("Broker did not indicate it support anonymous relay");
                    return;
                }

                if (!contains(offered, DELAYED_DELIVERY)) {
                    markAsInvalid("Broker did not indicate it support delayed message delivery");
                    return;
                }
            }
        });

        AmqpConnection connection = addConnection(client.connect());
        try {
            assertNotNull(connection);
            connection.getStateInspector().assertValid();
        } finally {
            connection.close();
        }
    }

    // Distinct container-ids must be allowed to connect concurrently.
    @Test(timeout = 60000)
    public void testCanConnectWithDifferentContainerIds() throws Exception {
        AmqpClient client = createAmqpClient();
        assertNotNull(client);

        AmqpConnection connection1 = addConnection(client.createConnection());
        AmqpConnection connection2 = addConnection(client.createConnection());

        connection1.setContainerId(getTestName() + "-Client:1");
        connection2.setContainerId(getTestName() + "-Client:2");

        connection1.connect();
        Wait.assertEquals(1, server::getConnectionCount);

        connection2.connect();
        Wait.assertEquals(2, server::getConnectionCount);

        connection1.close();
        Wait.assertEquals(1, server::getConnectionCount);

        connection2.close();
        Wait.assertEquals(0, server::getConnectionCount);
    }

    /**
     * With the sole-connection capability requested, a second connection reusing
     * the same container-id must be rejected with an invalid-field error whose
     * info map points at the container-id field.
     */
    @Test(timeout = 60000)
    public void testCannotConnectWithSameContainerId() throws Exception {
        AmqpClient client = createAmqpClient();

        List<Symbol> desiredCapabilities = new ArrayList<>(1);
        desiredCapabilities.add(AmqpSupport.SOLE_CONNECTION_CAPABILITY);

        assertNotNull(client);

        AmqpConnection connection1 = addConnection(client.createConnection());
        AmqpConnection connection2 = addConnection(client.createConnection());

        connection1.setDesiredCapabilities(desiredCapabilities);
        connection2.setDesiredCapabilities(desiredCapabilities);

        connection1.setContainerId(getTestName());
        connection2.setContainerId(getTestName());

        connection1.connect();
        assertEquals(1, server.getConnectionCount());

        connection2.setStateInspector(new AmqpValidator() {

            @Override
            public void inspectOpenedResource(Connection connection) {
                if (!connection.getRemoteProperties().containsKey(CONNECTION_OPEN_FAILED)) {
                    markAsInvalid("Broker did not set connection establishment failed property");
                }
            }

            @Override
            public void inspectClosedResource(Connection connection) {
                ErrorCondition remoteError = connection.getRemoteCondition();
                if (remoteError == null || remoteError.getCondition() == null) {
                    markAsInvalid("Broker did not add error condition for duplicate client ID");
                    // Nothing more to inspect without a condition; continuing would NPE
                    // on remoteError.getInfo() below.
                    return;
                }
                // Original code checked this condition twice (copy/paste duplicate).
                if (!remoteError.getCondition().equals(AmqpError.INVALID_FIELD)) {
                    markAsInvalid("Broker did not set condition to " + AmqpError.INVALID_FIELD);
                }

                // Validate the info map contains a hint that the container/client id was the
                // problem
                Map<?, ?> infoMap = remoteError.getInfo();
                if (infoMap == null) {
                    markAsInvalid("Broker did not set an info map on condition");
                } else if (!infoMap.containsKey(INVALID_FIELD)) {
                    markAsInvalid("Info map does not contain expected key");
                } else {
                    Object value = infoMap.get(INVALID_FIELD);
                    if (!CONTAINER_ID.equals(value)) {
                        markAsInvalid("Info map does not contain expected value: " + value);
                    }
                }
            }
        });

        try {
            connection2.connect();
            fail("Should not be able to connect with same container Id.");
        } catch (Exception ex) {
            LOG.debug("Second connection with same container Id failed as expected.");
        }

        connection2.getStateInspector().assertValid();
        connection2.close();

        Wait.assertEquals(1, server::getConnectionCount);

        connection1.close();
        Wait.assertEquals(0, server::getConnectionCount);
    }
}
package com.avp42.flickmash.analysis;

import com.avp42.flickmash.data.MovieData;
import com.avp42.flickmash.tools.RankedMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.omertron.themoviedbapi.model.MovieDb;
import com.omertron.themoviedbapi.model.Person;
import com.omertron.themoviedbapi.model.PersonCredit;

import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static com.avp42.flickmash.Constants.Analysis.DEFAULT_NUM_RESULTS;
import static com.avp42.flickmash.Constants.Analysis.SEPARATOR;

/**
 * Ranks the movies or people that best match a free-text query.
 * <p>
 * Results are computed lazily and cached per instance: the first call to
 * {@link #movieAnalysis()} / {@link #personAnalysis()} performs the (expensive)
 * lookups against {@link MovieData}; subsequent calls reuse the cached list.
 */
public class Analysis {

  private final String query;

  // Lazily-populated caches; null until the corresponding analysis has run.
  private List<MovieDb> movies;
  private List<Person> people;

  /** Orders movies by TMDb popularity, ascending. */
  private static final Comparator<MovieDb> movieDbPopularityComparator =
      new Comparator<MovieDb>() {
        @Override
        public int compare(MovieDb o1, MovieDb o2) {
          return Float.compare(o1.getPopularity(), o2.getPopularity());
        }
      };

  /** Orders people by TMDb popularity, ascending. */
  private static final Comparator<Person> personPopularityComparator =
      new Comparator<Person>() {
        @Override
        public int compare(Person o1, Person o2) {
          return Float.compare(o1.getPopularity(), o2.getPopularity());
        }
      };

  private Analysis(String query) {
    this.query = query;
  }

  /** Static factory: creates an analysis for the given query. */
  public static Analysis of(String query) {
    return new Analysis(query);
  }

  /** Equivalent to {@code movieAnalysis(DEFAULT_NUM_RESULTS)}. */
  public List<MovieDb> movieAnalysis() {
    return movieAnalysis(DEFAULT_NUM_RESULTS);
  }

  /**
   * Returns up to {@code numResults} movies ranked against the query.
   *
   * @param numResults maximum number of movies to return
   * @return ranked movies, best matches according to the scoring heuristic
   */
  public List<MovieDb> movieAnalysis(int numResults) {
    if (this.movies != null) {
      return this.movies.subList(0, Math.min(numResults, this.movies.size()));
    }

    // Get a list of possible people from this query.
    // Renamed from "people" to avoid shadowing the this.people cache field.
    Set<Person> candidatePeople = extractPeople();
    RankedMap<Person, Long> rankedPeople = RankedMap.newRankedMap(personPopularityComparator);
    for (Person p : candidatePeople) {
      rankedPeople.put(p, scorePerson(p));
    }
    List<Person> topPeople = rankedPeople.toList();
    // Keep at most one person per query token.
    topPeople = topPeople.subList(0, Math.min(query.split(SEPARATOR).length, topPeople.size()));

    // Debug output of the top-ranked people and their scores.
    for (Person p : rankedPeople.toList().subList(0, Math.min(numResults, rankedPeople.size()))) {
      System.out.printf("%s === %d\n", p.getName(), scorePerson(p));
    }

    // Movie ID -> People who were in the movie
    Map<Integer, Set<Person>> castMap = getCastMap(Sets.newHashSet(topPeople));

    // Movie ID -> Likelihood of person in movie
    RankedMap<Integer, Integer> scoreMap = getMovieScoreMap(castMap);

    // Retrieve MovieDb objects for the top-scoring movie ids.
    List<Map.Entry<Integer, Integer>> sortedMovieIdList = scoreMap.toEntryList();
    RankedMap<MovieDb, Integer> movieDbScoreMap = RankedMap.newRankedMap(movieDbPopularityComparator);
    int mapSize = sortedMovieIdList.size();
    for (int i = 0; i < Math.min(mapSize, numResults); i++) {
      Map.Entry<Integer, Integer> entry = sortedMovieIdList.get(i);
      MovieDb curMovie = MovieData.getInstance().getMovieById(entry.getKey());
      movieDbScoreMap.put(curMovie, entry.getValue());
    }

    this.movies = movieDbScoreMap.toList();
    return this.movies;
  }

  /**
   * Scores each movie by summing the scores of the candidate people credited in it,
   * then multiplying by the number of movies considered.
   *
   * @param castMap movie id -> set of candidate people credited in that movie
   */
  private RankedMap<Integer, Integer> getMovieScoreMap(Map<Integer, Set<Person>> castMap) {
    RankedMap<Integer, Integer> scoreMap = RankedMap.newRankedMap();
    // Hoisted out of the loops: the lower-cased query is loop-invariant and was
    // previously recomputed for every person of every movie.
    String lowerCaseQuery = query.toLowerCase();
    for (Map.Entry<Integer, Set<Person>> entry : castMap.entrySet()) {
      int score = 0;
      for (Person p : entry.getValue()) {
        long personScore = scorePerson(p);
        if (lowerCaseQuery.contains(p.getName().toLowerCase())) {
          // An exact name mention in the query doubles that person's weight.
          personScore *= 2;
        }
        // Explicit cast makes the pre-existing long -> int narrowing visible;
        // identical to the implicit cast the compound assignment performed before.
        score += (int) personScore;
      }
      scoreMap.put(entry.getKey(), score * castMap.size());
    }
    return scoreMap;
  }

  /**
   * Builds a map from movie id to the subset of {@code people} credited in that movie,
   * using each person's credit list from {@link MovieData}.
   */
  private Map<Integer, Set<Person>> getCastMap(Set<Person> people) {
    Map<Integer, Set<Person>> creditMap = Maps.newHashMap();
    for (Person p : people) {
      List<PersonCredit> credits = MovieData.getInstance().getPersonCredits(p.getId());
      for (PersonCredit c : credits) {
        int title = c.getMovieId();
        if (creditMap.containsKey(title)) {
          creditMap.get(title).add(p);
        } else {
          creditMap.put(title, Sets.newHashSet(p));
        }
      }
    }
    return creditMap;
  }

  /** Equivalent to {@code personAnalysis(DEFAULT_NUM_RESULTS)}. */
  public List<Person> personAnalysis() {
    return personAnalysis(DEFAULT_NUM_RESULTS);
  }

  /**
   * Returns up to {@code numResults} people ranked against the query.
   *
   * @param numResults maximum number of people to return
   */
  public List<Person> personAnalysis(int numResults) {
    if (this.people != null) {
      return this.people;
    }

    Set<MovieDb> movies = extractMovies();
    RankedMap<MovieDb, Long> rankedMovies = RankedMap.newRankedMap();
    for (MovieDb movie : movies) {
      rankedMovies.put(movie, scoreMovie(movie));
    }
    List<MovieDb> allSortedMovies = rankedMovies.toList();
    // Only the 25 best-matching movies feed the person ranking.
    List<MovieDb> sortedMovies = allSortedMovies.subList(0, Math.min(25, allSortedMovies.size()));

    // Debug output of every candidate movie and its score (CSV-ish).
    System.out.println("Score,Title");
    for (MovieDb movie : allSortedMovies) {
      System.out.printf("%d,%s\n", scoreMovie(movie), movie.getTitle());
    }

    // person -> movies they were in
    Map<Person, Set<MovieDb>> personMap = Maps.newHashMap();
    for (MovieDb movie : sortedMovies) {
      List<Person> cast = MovieData.getInstance().getMovieCast(movie.getId());
      for (Person p : cast) {
        if (personMap.containsKey(p)) {
          personMap.get(p).add(movie);
        } else {
          personMap.put(p, Sets.newHashSet(movie));
        }
      }
    }

    // person -> score: sum of the person's movies' scores, weighted by how many
    // of the candidate movies they appeared in.
    RankedMap<Person, Integer> rankedPersonMap = RankedMap.newRankedMap(personPopularityComparator);
    for (Map.Entry<Person, Set<MovieDb>> entry : personMap.entrySet()) {
      int score = 0;
      for (MovieDb movie : entry.getValue()) {
        score += scoreMovie(movie);
      }
      rankedPersonMap.put(entry.getKey(), score * entry.getValue().size());
    }

    // Take top numResults results (LinkedHashSet removes duplicates while keeping rank order).
    List<Person> rankedPeople = Lists.newArrayList(Sets.newLinkedHashSet(rankedPersonMap.toList()));
    this.people = rankedPeople.subList(0, Math.min(numResults, rankedPeople.size()));
    return this.people;
  }

  /** Searches MovieData for every person matching any subquery of the query. */
  private Set<Person> extractPeople() {
    Set<Person> people = Sets.newHashSet();
    Set<String> queryList = AnalysisUtils.generateSubqueries(this.query);
    for (String q : queryList) {
      people.addAll(MovieData.getInstance().searchPeople(q));
    }
    return people;
  }

  /** Searches MovieData for every movie matching any subquery of the query. */
  private Set<MovieDb> extractMovies() {
    Set<MovieDb> movies = Sets.newHashSet();
    Set<String> queryList = AnalysisUtils.generateSubqueries(this.query);
    for (String q : queryList) {
      movies.addAll(MovieData.getInstance().searchMovies(q));
    }
    return movies;
  }

  /** Scores a person's name against the query; single-token names get half weight. */
  private long scorePerson(Person person) {
    String name = person.getName();
    long score = QueryResultScorer.scoreQueryResult(query, name);
    return name.split(SEPARATOR).length < 2 ? score / 2 : score;
  }

  /** Scores a movie's title against the query. */
  private long scoreMovie(MovieDb movie) {
    return QueryResultScorer.scoreQueryResult(query, movie.getTitle());
  }
}
/*
 * Copyright 2020 S. Webber
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.projog.core.predicate;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.projog.TermFactory.atom;

import java.io.StringReader;
import java.util.Arrays;

import org.junit.Test;
import org.projog.TestUtils;
import org.projog.api.Projog;
import org.projog.api.QueryPlan;
import org.projog.api.QueryResult;
import org.projog.core.ProjogException;
import org.projog.core.predicate.udp.PredicateUtils;

/**
 * Tests of attempting to replace or update an already defined predicate.
 * <p>
 * See: https://github.com/s-webber/projog/issues/195
 */
public class PredicatesTest {
   private final PredicateKey KEY = new PredicateKey("test", 2);

   @Test
   public void testCannotReplacePredicateFactoryWithAnotherPredicateFactory() {
      Projog projog = new Projog();

      // given that a built-in predicate is associated with the key
      projog.addPredicateFactory(KEY, mock(PredicateFactory.class));

      // attempting to associate another built-in predicate with the key should cause an exception
      try {
         projog.addPredicateFactory(KEY, mock(PredicateFactory.class));
         fail();
      } catch (ProjogException e) {
         assertEquals("Already defined: test/2", e.getMessage());
      }
   }

   @Test
   public void testCannotReplacePredicateFactoryWithNonDynamicUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a built-in predicate is associated with the key
      projog.addPredicateFactory(KEY, mock(PredicateFactory.class));

      // attempting to add user defined clauses for the key should cause an exception
      try {
         projog.consultReader(new StringReader("test(a, b)."));
         fail();
      } catch (ProjogException e) {
         assertEquals("Cannot replace already defined built-in predicate: test/2", e.getCause().getMessage());
      }
   }

   @Test
   public void testCannotReplacePredicateFactoryWithDynamicUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a built-in predicate is associated with the key
      projog.addPredicateFactory(KEY, mock(PredicateFactory.class));

      // attempting to add dynamic user defined clauses for the key should cause an exception
      try {
         projog.consultReader(new StringReader("?- dynamic(test/2). test(a, b)."));
         fail();
      } catch (ProjogException e) {
         assertEquals("Cannot replace already defined built-in predicate: test/2", e.getCause().getMessage());
      }
   }

   @Test
   public void testCannotReplacePredicateFactoryWithAssertedPredicate() {
      Projog projog = new Projog();

      // given that a built-in predicate is associated with the key
      projog.addPredicateFactory(KEY, mock(PredicateFactory.class));

      // attempting to assert clauses for the key should cause an exception
      try {
         projog.executeOnce("assert(test(a, b)).");
         fail();
      } catch (ProjogException e) {
         assertEquals("Cannot replace already defined built-in predicate: test/2", e.getMessage());
      }
   }

   @Test
   public void testCannotReplaceNonDynamicUserDefinedPredicateWithPredicateFactory() {
      Projog projog = new Projog();

      // given that a non-dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("test(a, b)."));

      // attempting to associate a built-in predicate with the key should cause an exception
      try {
         projog.addPredicateFactory(KEY, mock(PredicateFactory.class));
         fail();
      } catch (ProjogException e) {
         assertEquals("Already defined: test/2", e.getMessage());
      }
   }

   @Test
   public void testCannotUpdateNonDynamicUserDefinedPredicateWithNonDynamicUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a non-dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("test(a, b)."));

      // attempting to add more user defined clauses for the key should cause an exception
      try {
         projog.consultReader(new StringReader("test(c, d)."));
         fail();
      } catch (ProjogException e) {
         assertEquals(
                  "Cannot append to already defined user defined predicate as it is not dynamic. You can set the predicate to dynamic by adding the following line to start of the file that the predicate is defined in:\n"
                           + "?- dynamic(test/2).",
                  e.getCause().getMessage());
      }
   }

   @Test
   public void testCannotUpdateNonDynamicUserDefinedPredicateWithDynamicUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a non-dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("test(a, b)."));

      // attempting to add more user defined clauses for the key should cause an exception
      try {
         projog.consultReader(new StringReader("?- dynamic(test/2). test(c, d)."));
         fail();
      } catch (ProjogException e) {
         assertEquals("Predicate has already been defined and is not dynamic: test/2", e.getCause().getMessage());
      }
   }

   @Test
   public void testCannotUpdateNonDynamicUserDefinedPredicateWithAssertedPredicate() {
      Projog projog = new Projog();

      // given that a non-dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("test(a, b)."));

      // attempting to add more user defined clauses for the key should cause an exception
      try {
         projog.executeOnce("assert(test(a, b)).");
         fail();
      } catch (ProjogException e) {
         assertEquals("Cannot append to already defined user defined predicate as it is not dynamic: test/2 clause: test(a, b)", e.getMessage());
      }
   }

   @Test
   public void testCannotReplaceDynamicUserDefinedPredicateWithPredicateFactory() {
      Projog projog = new Projog();

      // given that a dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("?- dynamic(test/2). test(a, b)."));

      // attempting to associate a built-in predicate with the key should cause an exception
      try {
         projog.addPredicateFactory(KEY, mock(PredicateFactory.class));
         fail();
      } catch (ProjogException e) {
         assertEquals("Already defined: test/2", e.getMessage());
      }
   }

   @Test
   public void testCanUpdateDynamicUserDefinedPredicateWithUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("?- dynamic(test/2). test(a, b)."));

      // querying it should find the defined clause
      QueryResult r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertFalse(r.next());

      // attempting to add more user defined clauses for the key should succeed, as was declared dynamic when first consulted
      projog.consultReader(new StringReader("test(c, d). test(e, f)."));

      // querying it should find the original defined clause and the subsequently defined clauses
      r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("c", r.getAtomName("X"));
      assertEquals("d", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("e", r.getAtomName("X"));
      assertEquals("f", r.getAtomName("Y"));
      assertFalse(r.next());
   }

   @Test
   public void testCanUpdateDynamicUserDefinedPredicateWithDynamicUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a dynamic user defined predicate is associated with the key
      projog.consultReader(new StringReader("?- dynamic(test/2). test(a, b)."));

      // querying it should find the defined clause
      QueryResult r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertFalse(r.next());

      // attempting to add more user defined clauses for the key should succeed, as was declared dynamic when first consulted
      projog.consultReader(new StringReader("?- dynamic(test/2). test(c, d). test(e, f)."));

      // querying it should find the original defined clause and the subsequently defined clauses
      r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("c", r.getAtomName("X"));
      assertEquals("d", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("e", r.getAtomName("X"));
      assertEquals("f", r.getAtomName("Y"));
      assertFalse(r.next());
   }

   @Test
   public void testCanUpdateAssertedPredicateWithUserDefinedPredicate() {
      Projog projog = new Projog();

      // given that a clause has been asserted for the key
      projog.executeOnce("assert(test(a, b)).");

      // querying it should find the defined clause
      QueryResult r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertFalse(r.next());

      // attempting to add more user defined clauses for the key should succeed, as was first created via an assert so is dynamic
      projog.consultReader(new StringReader("test(c, d). test(e, f)."));

      // querying it should find the original defined clause and the subsequently defined clauses
      r = projog.executeQuery("test(X, Y).");
      assertTrue(r.next());
      assertEquals("a", r.getAtomName("X"));
      assertEquals("b", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("c", r.getAtomName("X"));
      assertEquals("d", r.getAtomName("Y"));
      assertTrue(r.next());
      assertEquals("e", r.getAtomName("X"));
      assertEquals("f", r.getAtomName("Y"));
      assertFalse(r.next());
   }

   @Test
   public void testCannotReplaceAssertedPredicateWithPredicateFactory() {
      Projog projog = new Projog();

      // given that a clause has been asserted for the key
      projog.executeOnce("assert(test(a, b)).");

      // attempting to associate a built-in predicate with the key should cause an exception
      try {
         projog.addPredicateFactory(KEY, mock(PredicateFactory.class));
         fail();
      } catch (ProjogException e) {
         assertEquals("Already defined: test/2", e.getMessage());
      }
   }

   /**
    * Test scenario described in https://github.com/s-webber/projog/issues/195
    * <p>
    * <pre>
    * 1. Consult resource containing facts that have been defined as dynamic, and a rule that uses those facts.
    * 2. Consult another resource that contains an additional fact for the predicate defined in step 1.
    * 3. Query the rule defined in step 1 to confirm it uses the facts defined in both steps 1 and 2.
    * </pre>
    */
   @Test
   public void testAppendToAlreadyDefinedClauseUsedByRule() {
      Projog projog = new Projog();

      // given that a dynamic user defined predicate is associated with the key
      StringBuilder input1 = new StringBuilder();
      input1.append("?- dynamic(test/2)."); // define as dynamic so can be updated by later consultations
      input1.append("test(a,1).");
      input1.append("test(b,2).");
      input1.append("test(c,3).");
      input1.append("test(d,4).");
      input1.append("test(e,5).");
      input1.append("test(f,6).");
      input1.append("test(g,7).");
      input1.append("test(h,8).");
      input1.append("test(i,9).");
      input1.append("testRule(X) :- test(X, Y), Y mod 2 =:= 0.");
      projog.consultReader(new StringReader(input1.toString()));

      // querying it should find the defined clause
      QueryPlan plan = projog.createPlan("testRule(X).");
      QueryResult r = plan.executeQuery();
      assertTrue(r.next());
      assertEquals("b", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("d", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("f", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("h", r.getAtomName("X"));
      assertFalse(r.next());
      assertEquals(Arrays.asList("b", "d", "f", "h"), plan.createStatement().findAllAsAtomName());

      // attempting to add more user defined clauses for the key should succeed, as was declared dynamic when first consulted
      // (removed an unused StringBuilder that was previously built here but never consulted)
      projog.consultReader(new StringReader("test(j,10)."));

      // querying it should find the original defined clause and the subsequently defined clauses
      r = plan.executeQuery();
      assertTrue(r.next());
      assertEquals("b", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("d", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("f", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("h", r.getAtomName("X"));
      assertTrue(r.next());
      assertEquals("j", r.getAtomName("X"));
      assertFalse(r.next());
      assertEquals(Arrays.asList("b", "d", "f", "h", "j"), plan.createStatement().findAllAsAtomName());
   }

   @Test
   public void testGetPredicate() {
      Predicates p = TestUtils.createKnowledgeBase().getPredicates();
      assertSame(PredicateUtils.TRUE, p.getPredicate(atom("true")));
      assertSame(PredicateUtils.FALSE, p.getPredicate(atom("does_not_exist")));
   }
}
/*
 * JBoss, Home of Professional Open Source
 * Copyright 2012 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 */
package org.searchisko.persistence.service;

import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;

import javax.persistence.EntityManager;
import javax.persistence.EntityNotFoundException;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Root;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.StreamingOutput;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.searchisko.api.rest.ESDataOnlyResponse;
import org.searchisko.persistence.jpa.model.ModelToJSONMapConverter;

/**
 * JPA implementation of entity service. It's not session bean because type is unknown, so must be called from Session
 * bean to work with transactions!
 *
 * @author Libor Krzyzanek
 * @author Vlastimil Elias (velias at redhat dot com)
 *
 */
public class JpaEntityService<T> implements EntityService {

	protected Logger log;

	// EntityManager is injected by the caller; this class performs no transaction management itself (see class javadoc).
	private EntityManager em;

	// Converts between the JPA model type T and the JSON Map representation used by the REST layer.
	protected ModelToJSONMapConverter<T> converter;

	protected Class<T> entityType;

	/**
	 * Create the service for one entity type.
	 *
	 * @param em entity manager to run queries through
	 * @param converter model &lt;-&gt; JSON map converter for {@code entityType}
	 * @param entityType JPA entity class this service manages
	 */
	public JpaEntityService(EntityManager em, ModelToJSONMapConverter<T> converter, Class<T> entityType) {
		log = Logger.getLogger(this.getClass().getName());
		this.em = em;
		this.converter = converter;
		this.entityType = entityType;
	}

	/**
	 * Stream all entities as a JSON object of the form {@code {"total": N, "hits": [{"id": ..., "data": {...}}, ...]}}.
	 * <p>
	 * Note: both the page of entities and the total count are loaded eagerly here; only the JSON serialization is
	 * deferred to the returned {@link StreamingOutput}.
	 *
	 * @param from optional zero-based offset of first record to return
	 * @param size optional maximum number of records to return
	 * @param fieldsToRemove fields stripped from each entity's JSON data before writing
	 */
	@Override
	public StreamingOutput getAll(Integer from, Integer size, final String[] fieldsToRemove) {
		CriteriaBuilder cb = em.getCriteriaBuilder();

		final List<T> result = listEntities(cb, from, size);

		// Separate count query so "total" reflects all rows, not just the requested page.
		CriteriaQuery<Long> queryCount = cb.createQuery(Long.class);
		queryCount.select(cb.count(queryCount.from(entityType)));
		final long count = em.createQuery(queryCount).getSingleResult();

		return new StreamingOutput() {
			@Override
			public void write(OutputStream output) throws IOException, WebApplicationException {
				XContentBuilder builder = XContentFactory.jsonBuilder(output);
				builder.startObject();
				if (result != null) {
					builder.field("total", count);
					builder.startArray("hits");
					for (T t : result) {
						Map<String, Object> jsonData = converter.convertToJsonMap(t);
						builder.startObject();
						builder.field("id", converter.getId(t));
						builder.field("data", ESDataOnlyResponse.removeFields(jsonData, fieldsToRemove));
						builder.endObject();
					}
				} else {
					// No results: still emit an (empty) hits array so the response shape is consistent.
					builder.field("total", 0);
					builder.startArray("hits");
				}
				// Both branches above open the "hits" array, so it is always closed here.
				builder.endArray();
				builder.endObject();
				builder.close();
			}
		};
	}

	/**
	 * List a page of entities ordered ascending by the entity id field.
	 *
	 * @param from optional zero-based offset; ignored when null or negative
	 * @param size optional page size; ignored when null or non-positive
	 */
	protected List<T> listEntities(CriteriaBuilder cb, Integer from, Integer size) {
		CriteriaQuery<T> queryList = cb.createQuery(entityType);
		Root<T> root = queryList.from(entityType);
		queryList.select(root);
		queryList.orderBy(cb.asc(root.get(converter.getEntityIdFieldName())));
		TypedQuery<T> q = em.createQuery(queryList);
		if (from != null && from >= 0)
			q.setFirstResult(from);
		if (size != null && size > 0)
			q.setMaxResults(size);
		return q.getResultList();
	}

	/**
	 * Load all entities (unpaged, unordered) converted to JSON maps.
	 */
	@Override
	public List<Map<String, Object>> getAll() {
		CriteriaBuilder cb = em.getCriteriaBuilder();
		CriteriaQuery<T> criteria = cb.createQuery(entityType);
		Root<T> root = criteria.from(entityType);
		criteria.select(root);
		final List<T> result = em.createQuery(criteria).getResultList();
		List<Map<String, Object>> ret = new ArrayList<>();
		try {
			for (T row : result) {
				ret.add(converter.convertToJsonMap(row));
			}
			return ret;
		} catch (Exception e) {
			// Conversion failures are not recoverable here; surface them as unchecked.
			throw new RuntimeException(e);
		}
	}

	/**
	 * Get one entity as a JSON map, or null when no entity exists for the id.
	 */
	@Override
	public Map<String, Object> get(String id) {
		T jpaEntity = em.find(entityType, id);
		if (jpaEntity == null) {
			return null;
		}
		try {
			return converter.convertToJsonMap(jpaEntity);
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Create an entity under a newly generated random id and return that id.
	 */
	@Override
	public String create(Map<String, Object> entity) {
		String id = Strings.randomBase64UUID();
		create(id, entity);
		return id;
	}

	/**
	 * Create or overwrite the entity stored under the given id (upsert semantics).
	 */
	@Override
	public void create(String id, Map<String, Object> entity) {
		try {
			T jpaEntity = em.find(entityType, id);
			if (jpaEntity != null) {
				// Entity exists already - only update its value in place.
				converter.updateValue(jpaEntity, entity);
			} else {
				jpaEntity = converter.convertToModel(id, entity);
				em.persist(jpaEntity);
			}
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Update is identical to create: both upsert by id.
	 */
	@Override
	public void update(String id, Map<String, Object> entity) {
		create(id, entity);
	}

	/**
	 * Delete the entity under the given id; deleting a nonexistent id is a silent no-op.
	 */
	@Override
	public void delete(String id) {
		try {
			T reference = em.getReference(entityType, id);
			em.remove(reference);
		} catch (EntityNotFoundException e) {
			// OK - nothing to delete, treat as success.
		}
	}

	// Page size used when iterating all entities via the ListRequest API.
	protected int LIST_PAGE_SIZE = 200;

	/**
	 * Simple page holder: the converted content of one page plus the offset it started at.
	 */
	protected static class ListRequestImpl implements ListRequest {

		List<ContentTuple<String, Map<String, Object>>> content;

		int beginIndex = 0;

		protected ListRequestImpl(int beginIndex, List<ContentTuple<String, Map<String, Object>>> content) {
			super();
			this.beginIndex = beginIndex;
			this.content = content;
		}

		@Override
		public boolean hasContent() {
			return content != null && !content.isEmpty();
		}

		@Override
		public List<ContentTuple<String, Map<String, Object>>> content() {
			return content;
		}
	}

	/** Start iteration from the first page. */
	@Override
	public ListRequest listRequestInit() {
		return listRequestImpl(0);
	}

	/** Advance iteration to the page following {@code previous}. */
	@Override
	public ListRequest listRequestNext(ListRequest previous) {
		ListRequestImpl lr = (ListRequestImpl) previous;
		return listRequestImpl(lr.beginIndex + LIST_PAGE_SIZE);
	}

	/**
	 * Load one page starting at {@code beginIndex}. Entities whose JSON content cannot be converted are logged and
	 * skipped rather than failing the whole page.
	 */
	protected ListRequest listRequestImpl(int beginIndex) {
		CriteriaBuilder cb = em.getCriteriaBuilder();
		final List<T> result = listEntities(cb, beginIndex, LIST_PAGE_SIZE);
		List<ContentTuple<String, Map<String, Object>>> content = new ArrayList<>(10);
		for (T data : result) {
			try {
				content.add(converter.convertToContentTuple(data));
			} catch (IOException e) {
				log.severe("Could not convert Entity.id=" + converter.getId(data)
						+ " JSON content to valid object so skip it: " + e.getMessage());
			}
		}
		return new ListRequestImpl(beginIndex, content);
	}

}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hisp.dhis.dataelement; import java.util.ArrayList; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Set; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.collections4.SetValuedMap; import org.hisp.dhis.category.Category; import org.hisp.dhis.category.CategoryCombo; import org.hisp.dhis.category.CategoryComboStore; import org.hisp.dhis.category.CategoryOption; import org.hisp.dhis.category.CategoryOptionCombo; import org.hisp.dhis.category.CategoryOptionComboStore; import org.hisp.dhis.category.CategoryOptionGroup; import org.hisp.dhis.category.CategoryOptionGroupSet; import org.hisp.dhis.category.CategoryOptionGroupSetStore; import org.hisp.dhis.category.CategoryOptionGroupStore; import org.hisp.dhis.category.CategoryOptionStore; import org.hisp.dhis.category.CategoryService; import org.hisp.dhis.category.CategoryStore; import org.hisp.dhis.common.DataDimensionType; import org.hisp.dhis.common.DeleteNotAllowedException; import org.hisp.dhis.common.IdScheme; import org.hisp.dhis.common.IdentifiableObjectManager; import org.hisp.dhis.dataset.DataSet; import org.hisp.dhis.dataset.DataSetElement; import org.hisp.dhis.program.jdbc.JdbcOrgUnitAssociationsStore; import org.hisp.dhis.security.acl.AccessStringHelper; import org.hisp.dhis.security.acl.AclService; import org.hisp.dhis.user.CurrentUserService; import org.hisp.dhis.user.User; import org.hisp.dhis.user.UserCredentials; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import com.google.common.collect.Lists; import com.google.common.collect.Sets; /** * @author Abyot Asalefew */ @Slf4j @Service( "org.hisp.dhis.category.CategoryService" ) @RequiredArgsConstructor public class DefaultCategoryService implements CategoryService { // 
-------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private final CategoryStore categoryStore;

    private final CategoryOptionStore categoryOptionStore;

    private final CategoryComboStore categoryComboStore;

    private final CategoryOptionComboStore categoryOptionComboStore;

    private final CategoryOptionGroupStore categoryOptionGroupStore;

    private final CategoryOptionGroupSetStore categoryOptionGroupSetStore;

    private final IdentifiableObjectManager idObjectManager;

    private final CurrentUserService currentUserService;

    private final AclService aclService;

    @Qualifier( "jdbcCategoryOptionOrgUnitAssociationsStore" )
    private final JdbcOrgUnitAssociationsStore jdbcOrgUnitAssociationsStore;

    // -------------------------------------------------------------------------
    // Category
    // -------------------------------------------------------------------------

    @Override
    @Transactional
    public long addCategory( Category dataElementCategory )
    {
        // Persist and return the generated database id.
        categoryStore.save( dataElementCategory );

        return dataElementCategory.getId();
    }

    @Override
    @Transactional
    public void updateCategory( Category dataElementCategory )
    {
        categoryStore.update( dataElementCategory );
    }

    @Override
    @Transactional
    public void deleteCategory( Category dataElementCategory )
    {
        categoryStore.delete( dataElementCategory );
    }

    @Override
    @Transactional( readOnly = true )
    public List<Category> getAllDataElementCategories()
    {
        return categoryStore.getAll();
    }

    @Override
    @Transactional( readOnly = true )
    public Category getCategory( long id )
    {
        return categoryStore.get( id );
    }

    @Override
    @Transactional( readOnly = true )
    public Category getCategory( String uid )
    {
        return categoryStore.getByUid( uid );
    }

    // Returns the first category with an exactly matching name, or null when none exists.
    @Override
    @Transactional( readOnly = true )
    public Category getCategoryByName( String name )
    {
        List<Category> dataElementCategories = new ArrayList<>( categoryStore.getAllEqName( name ) );

        if ( dataElementCategories.isEmpty() )
        {
            return null;
        }

        return dataElementCategories.get( 0 );
    }

    @Override
    @Transactional( readOnly = true )
    public Category getDefaultCategory()
    {
        // Looked up by the well-known default name rather than by uid/id.
        return getCategoryByName( Category.DEFAULT_NAME );
    }

    @Override
    @Transactional( readOnly = true )
    public List<Category> getDisaggregationCategories()
    {
        return categoryStore.getCategoriesByDimensionType( DataDimensionType.DISAGGREGATION );
    }

    @Override
    @Transactional( readOnly = true )
    public List<Category> getDisaggregationDataDimensionCategoriesNoAcl()
    {
        return categoryStore.getCategoriesNoAcl( DataDimensionType.DISAGGREGATION, true );
    }

    @Override
    @Transactional( readOnly = true )
    public List<Category> getAttributeCategories()
    {
        return categoryStore.getCategoriesByDimensionType( DataDimensionType.ATTRIBUTE );
    }

    @Override
    @Transactional( readOnly = true )
    public List<Category> getAttributeDataDimensionCategoriesNoAcl()
    {
        return categoryStore.getCategoriesNoAcl( DataDimensionType.ATTRIBUTE, true );
    }

    // -------------------------------------------------------------------------
    // CategoryOption
    // -------------------------------------------------------------------------

    @Override
    @Transactional
    public long addCategoryOption( CategoryOption dataElementCategoryOption )
    {
        categoryOptionStore.save( dataElementCategoryOption );

        return dataElementCategoryOption.getId();
    }

    @Override
    @Transactional
    public void updateCategoryOption( CategoryOption dataElementCategoryOption )
    {
        categoryOptionStore.update( dataElementCategoryOption );
    }

    @Override
    @Transactional
    public void deleteCategoryOption( CategoryOption dataElementCategoryOption )
    {
        categoryOptionStore.delete( dataElementCategoryOption );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryOption getCategoryOption( long id )
    {
        return categoryOptionStore.get( id );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryOption getCategoryOption( String uid )
    {
        return categoryOptionStore.getByUid( uid );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryOption getCategoryOptionByName( String name )
    {
        return categoryOptionStore.getByName( name );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryOption getDefaultCategoryOption()
    {
        return getCategoryOptionByName( CategoryOption.DEFAULT_NAME );
    }

    @Override
    @Transactional( readOnly = true )
    public List<CategoryOption> getAllCategoryOptions()
    {
        return categoryOptionStore.getAll();
    }

    @Override
    @Transactional( readOnly = true )
    public List<CategoryOption> getCategoryOptions( Category category )
    {
        return categoryOptionStore.getCategoryOptions( category );
    }

    // Superusers see every option of the category; other users only options they can write data to.
    // A null user gets an empty list.
    @Override
    @Transactional( readOnly = true )
    public List<CategoryOption> getDataWriteCategoryOptions( Category category, User user )
    {
        if ( user == null )
        {
            return Lists.newArrayList();
        }

        return user.isSuper() ? getCategoryOptions( category )
            : categoryOptionStore.getDataWriteCategoryOptions( category, user );
    }

    // Collects the options of all category dimension constraints of the credentials.
    // NOTE: returns null (not an empty set) when the user has no category constraints.
    @Override
    @Transactional( readOnly = true )
    public Set<CategoryOption> getCoDimensionConstraints( UserCredentials userCredentials )
    {
        Set<CategoryOption> options = null;

        Set<Category> catConstraints = userCredentials.getCatDimensionConstraints();

        if ( catConstraints != null && !catConstraints.isEmpty() )
        {
            options = new HashSet<>();

            for ( Category category : catConstraints )
            {
                options.addAll( getCategoryOptions( category ) );
            }
        }

        return options;
    }

    // -------------------------------------------------------------------------
    // CategoryCombo
    // -------------------------------------------------------------------------

    @Override
    @Transactional
    public long addCategoryCombo( CategoryCombo dataElementCategoryCombo )
    {
        categoryComboStore.save( dataElementCategoryCombo );

        return dataElementCategoryCombo.getId();
    }

    @Override
    @Transactional
    public void updateCategoryCombo( CategoryCombo dataElementCategoryCombo )
    {
        categoryComboStore.update( dataElementCategoryCombo );
    }

    @Override
    @Transactional
    public void deleteCategoryCombo( CategoryCombo dataElementCategoryCombo )
    {
        categoryComboStore.delete( dataElementCategoryCombo );
    }

    @Override
    @Transactional( readOnly = true )
    public List<CategoryCombo> getAllCategoryCombos()
    {
        return categoryComboStore.getAll();
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryCombo getCategoryCombo( long id )
    {
        return categoryComboStore.get( id );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryCombo getCategoryCombo( String uid )
    {
        return categoryComboStore.getByUid( uid );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryCombo getCategoryComboByName( String name )
    {
        return categoryComboStore.getByName( name );
    }

    @Override
    @Transactional( readOnly = true )
    public CategoryCombo getDefaultCategoryCombo()
    {
        return getCategoryComboByName( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME );
    }

    @Override
    @Transactional( readOnly = true )
    public List<CategoryCombo> getDisaggregationCategoryCombos()
    {
        return categoryComboStore.getCategoryCombosByDimensionType( DataDimensionType.DISAGGREGATION );
    }

    @Override
    @Transactional( readOnly = true )
    public List<CategoryCombo> getAttributeCategoryCombos()
    {
        return categoryComboStore.getCategoryCombosByDimensionType( DataDimensionType.ATTRIBUTE );
    }

    // Validates a category combo's structure; returns an i18n error key describing the
    // first violation found, presumably null when valid (method continues past this view).
    @Override
    @Transactional( readOnly = true )
    public String validateCategoryCombo( CategoryCombo categoryCombo )
    {
        if ( categoryCombo == null )
        {
            return "category_combo_is_null";
        }

        if ( categoryCombo.getCategories() == null || categoryCombo.getCategories().isEmpty() )
        {
            return "category_combo_must_have_at_least_one_category";
        }

        // A set smaller than the list means the list contained duplicates.
        if ( Sets.newHashSet( categoryCombo.getCategories() ).size() < categoryCombo.getCategories().size() )
        {
            return "category_combo_cannot_have_duplicate_categories";
        }

        Set<CategoryOption> categoryOptions = new HashSet<>();

        for ( Category category : categoryCombo.getCategories() )
        {
            if ( category == null || category.getCategoryOptions().isEmpty() )
            {
                return "categories_must_have_at_least_one_category_option";
            }

            if ( !Sets.intersection( categoryOptions, Sets.newHashSet( category.getCategoryOptions() )
).isEmpty() ) { return "categories_cannot_share_category_options"; } } return null; } // ------------------------------------------------------------------------- // CategoryOptionCombo // ------------------------------------------------------------------------- @Override @Transactional public long addCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo ) { categoryOptionComboStore.save( dataElementCategoryOptionCombo ); return dataElementCategoryOptionCombo.getId(); } @Override @Transactional public void updateCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo ) { categoryOptionComboStore.update( dataElementCategoryOptionCombo ); } @Override @Transactional public void deleteCategoryOptionCombo( CategoryOptionCombo dataElementCategoryOptionCombo ) { categoryOptionComboStore.delete( dataElementCategoryOptionCombo ); } @Override @Transactional( noRollbackFor = DeleteNotAllowedException.class ) public void deleteCategoryOptionComboNoRollback( CategoryOptionCombo categoryOptionCombo ) { categoryOptionComboStore.deleteNoRollBack( categoryOptionCombo ); } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getCategoryOptionCombo( long id ) { return categoryOptionComboStore.get( id ); } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getCategoryOptionCombo( String uid ) { return categoryOptionComboStore.getByUid( uid ); } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getCategoryOptionComboByCode( String code ) { return categoryOptionComboStore.getByCode( code ); } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getCategoryOptionCombo( CategoryCombo categoryCombo, Set<CategoryOption> categoryOptions ) { return categoryOptionComboStore.getCategoryOptionCombo( categoryCombo, categoryOptions ); } @Override @Transactional( readOnly = true ) public List<CategoryOptionCombo> getAllCategoryOptionCombos() { return categoryOptionComboStore.getAll(); } 
@Override @Transactional public void generateDefaultDimension() { // --------------------------------------------------------------------- // CategoryOption // --------------------------------------------------------------------- CategoryOption categoryOption = new CategoryOption( CategoryOption.DEFAULT_NAME ); categoryOption.setUid( "xYerKDKCefk" ); categoryOption.setCode( "default" ); addCategoryOption( categoryOption ); categoryOption.setPublicAccess( AccessStringHelper.CATEGORY_OPTION_DEFAULT ); updateCategoryOption( categoryOption ); // --------------------------------------------------------------------- // Category // --------------------------------------------------------------------- Category category = new Category( Category.DEFAULT_NAME, DataDimensionType.DISAGGREGATION ); category.setUid( "GLevLNI9wkl" ); category.setCode( "default" ); category.setShortName( "default" ); category.setDataDimension( false ); category.addCategoryOption( categoryOption ); addCategory( category ); category.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT ); updateCategory( category ); // --------------------------------------------------------------------- // CategoryCombo // --------------------------------------------------------------------- CategoryCombo categoryCombo = new CategoryCombo( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME, DataDimensionType.DISAGGREGATION ); categoryCombo.setUid( "bjDvmb4bfuf" ); categoryCombo.setCode( "default" ); categoryCombo.setDataDimensionType( DataDimensionType.DISAGGREGATION ); categoryCombo.addCategory( category ); addCategoryCombo( categoryCombo ); categoryCombo.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT ); updateCategoryCombo( categoryCombo ); // --------------------------------------------------------------------- // CategoryOptionCombo // --------------------------------------------------------------------- CategoryOptionCombo categoryOptionCombo = new CategoryOptionCombo(); 
categoryOptionCombo.setUid( "HllvX50cXC0" ); categoryOptionCombo.setCode( "default" ); categoryOptionCombo.setCategoryCombo( categoryCombo ); categoryOptionCombo.addCategoryOption( categoryOption ); addCategoryOptionCombo( categoryOptionCombo ); categoryOptionCombo.setPublicAccess( AccessStringHelper.CATEGORY_NO_DATA_SHARING_DEFAULT ); updateCategoryOptionCombo( categoryOptionCombo ); Set<CategoryOptionCombo> categoryOptionCombos = new HashSet<>(); categoryOptionCombos.add( categoryOptionCombo ); categoryCombo.setOptionCombos( categoryOptionCombos ); updateCategoryCombo( categoryCombo ); categoryOption.setCategoryOptionCombos( categoryOptionCombos ); updateCategoryOption( categoryOption ); } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getDefaultCategoryOptionCombo() { return categoryOptionComboStore.getByName( CategoryCombo.DEFAULT_CATEGORY_COMBO_NAME ); } @Override @Transactional public void generateOptionCombos( CategoryCombo categoryCombo ) { categoryCombo.generateOptionCombos(); for ( CategoryOptionCombo optionCombo : categoryCombo.getOptionCombos() ) { categoryCombo.getOptionCombos().add( optionCombo ); addCategoryOptionCombo( optionCombo ); } updateCategoryCombo( categoryCombo ); } @Override @Transactional public void updateOptionCombos( Category category ) { for ( CategoryCombo categoryCombo : getAllCategoryCombos() ) { if ( categoryCombo.getCategories().contains( category ) ) { updateOptionCombos( categoryCombo ); } } } @Override @Transactional public void updateOptionCombos( CategoryCombo categoryCombo ) { if ( categoryCombo == null || !categoryCombo.isValid() ) { log.warn( "Category combo is null or invalid, could not update option combos: " + categoryCombo ); return; } List<CategoryOptionCombo> generatedOptionCombos = categoryCombo.generateOptionCombosList(); Set<CategoryOptionCombo> persistedOptionCombos = categoryCombo.getOptionCombos(); boolean modified = false; for ( CategoryOptionCombo optionCombo : generatedOptionCombos ) 
{ if ( !persistedOptionCombos.contains( optionCombo ) ) { categoryCombo.getOptionCombos().add( optionCombo ); addCategoryOptionCombo( optionCombo ); log.info( "Added missing category option combo: " + optionCombo + " for category combo: " + categoryCombo.getName() ); modified = true; } } if ( modified ) { updateCategoryCombo( categoryCombo ); } } @Override @Transactional( readOnly = true ) public CategoryOptionCombo getCategoryOptionComboAcl( IdScheme idScheme, String id ) { CategoryOptionCombo coc = idObjectManager.getObject( CategoryOptionCombo.class, idScheme, id ); if ( coc != null ) { User user = currentUserService.getCurrentUser(); for ( CategoryOption categoryOption : coc.getCategoryOptions() ) { if ( !aclService.canDataWrite( user, categoryOption ) ) { return null; } } } return coc; } @Override @Transactional public void updateCategoryOptionComboNames() { categoryOptionComboStore.updateNames(); } // ------------------------------------------------------------------------- // DataElementOperand // ------------------------------------------------------------------------- @Override @Transactional( readOnly = true ) public List<DataElementOperand> getOperands( Collection<DataElement> dataElements ) { return getOperands( dataElements, false ); } @Override @Transactional( readOnly = true ) public List<DataElementOperand> getOperands( Collection<DataElement> dataElements, boolean includeTotals ) { List<DataElementOperand> operands = Lists.newArrayList(); for ( DataElement dataElement : dataElements ) { Set<CategoryCombo> categoryCombos = dataElement.getCategoryCombos(); boolean anyIsDefault = categoryCombos.stream().anyMatch( cc -> cc.isDefault() ); if ( includeTotals && !anyIsDefault ) { operands.add( new DataElementOperand( dataElement ) ); } for ( CategoryCombo categoryCombo : categoryCombos ) { operands.addAll( getOperands( dataElement, categoryCombo ) ); } } return operands; } @Override @Transactional( readOnly = true ) public List<DataElementOperand> 
getOperands( DataSet dataSet, boolean includeTotals ) { List<DataElementOperand> operands = Lists.newArrayList(); for ( DataSetElement element : dataSet.getDataSetElements() ) { CategoryCombo categoryCombo = element.getResolvedCategoryCombo(); if ( includeTotals && !categoryCombo.isDefault() ) { operands.add( new DataElementOperand( element.getDataElement() ) ); } operands.addAll( getOperands( element.getDataElement(), element.getResolvedCategoryCombo() ) ); } return operands; } private List<DataElementOperand> getOperands( DataElement dataElement, CategoryCombo categoryCombo ) { List<DataElementOperand> operands = Lists.newArrayList(); for ( CategoryOptionCombo categoryOptionCombo : categoryCombo.getSortedOptionCombos() ) { operands.add( new DataElementOperand( dataElement, categoryOptionCombo ) ); } return operands; } // ------------------------------------------------------------------------- // CategoryOptionGroup // ------------------------------------------------------------------------- @Override @Transactional public long saveCategoryOptionGroup( CategoryOptionGroup group ) { categoryOptionGroupStore.save( group ); return group.getId(); } @Override @Transactional public void updateCategoryOptionGroup( CategoryOptionGroup group ) { categoryOptionGroupStore.update( group ); } @Override @Transactional( readOnly = true ) public CategoryOptionGroup getCategoryOptionGroup( long id ) { return categoryOptionGroupStore.get( id ); } @Override @Transactional( readOnly = true ) public CategoryOptionGroup getCategoryOptionGroup( String uid ) { return categoryOptionGroupStore.getByUid( uid ); } @Override @Transactional public void deleteCategoryOptionGroup( CategoryOptionGroup group ) { categoryOptionGroupStore.delete( group ); } @Override @Transactional( readOnly = true ) public List<CategoryOptionGroup> getAllCategoryOptionGroups() { return categoryOptionGroupStore.getAll(); } @Override @Transactional( readOnly = true ) public List<CategoryOptionGroup> 
getCategoryOptionGroups( CategoryOptionGroupSet groupSet ) { return categoryOptionGroupStore.getCategoryOptionGroups( groupSet ); } @Override @Transactional( readOnly = true ) public Set<CategoryOptionGroup> getCogDimensionConstraints( UserCredentials userCredentials ) { Set<CategoryOptionGroup> groups = null; Set<CategoryOptionGroupSet> cogsConstraints = userCredentials.getCogsDimensionConstraints(); if ( cogsConstraints != null && !cogsConstraints.isEmpty() ) { groups = new HashSet<>(); for ( CategoryOptionGroupSet cogs : cogsConstraints ) { groups.addAll( getCategoryOptionGroups( cogs ) ); } } return groups; } // ------------------------------------------------------------------------- // CategoryOptionGroupSet // ------------------------------------------------------------------------- @Override @Transactional public long saveCategoryOptionGroupSet( CategoryOptionGroupSet group ) { categoryOptionGroupSetStore.save( group ); return group.getId(); } @Override @Transactional public void updateCategoryOptionGroupSet( CategoryOptionGroupSet group ) { categoryOptionGroupSetStore.update( group ); } @Override @Transactional( readOnly = true ) public CategoryOptionGroupSet getCategoryOptionGroupSet( long id ) { return categoryOptionGroupSetStore.get( id ); } @Override @Transactional( readOnly = true ) public CategoryOptionGroupSet getCategoryOptionGroupSet( String uid ) { return categoryOptionGroupSetStore.getByUid( uid ); } @Override @Transactional public void deleteCategoryOptionGroupSet( CategoryOptionGroupSet group ) { categoryOptionGroupSetStore.delete( group ); } @Override @Transactional( readOnly = true ) public List<CategoryOptionGroupSet> getAllCategoryOptionGroupSets() { return categoryOptionGroupSetStore.getAll(); } @Override @Transactional( readOnly = true ) public List<CategoryOptionGroupSet> getDisaggregationCategoryOptionGroupSetsNoAcl() { return categoryOptionGroupSetStore.getCategoryOptionGroupSetsNoAcl( DataDimensionType.DISAGGREGATION, true ); } 
@Override @Transactional( readOnly = true ) public List<CategoryOptionGroupSet> getAttributeCategoryOptionGroupSetsNoAcl() { return categoryOptionGroupSetStore.getCategoryOptionGroupSetsNoAcl( DataDimensionType.ATTRIBUTE, true ); } @Override public SetValuedMap<String, String> getCategoryOptionOrganisationUnitsAssociations( Set<String> uids ) { return jdbcOrgUnitAssociationsStore.getOrganisationUnitsAssociationsForCurrentUser( uids ); } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.pinpointemail.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * A request to send an email message via Amazon Pinpoint Email.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-email-2018-07-26/SendEmail" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SendEmailRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The "From" address for the email; must be a verified address. */
    private String fromEmailAddress;

    /** The recipients of the email message. */
    private Destination destination;

    /** The "Reply-to" addresses; each receives any reply to the message. */
    private java.util.List<String> replyToAddresses;

    /** Address that receives bounce and complaint notifications. */
    private String feedbackForwardingEmailAddress;

    /** The message body; either a Simple or a Raw message. */
    private EmailContent content;

    /** Name/value tags applied to the email for event publishing. */
    private java.util.List<MessageTag> emailTags;

    /** The configuration set to use when sending the email. */
    private String configurationSetName;

    /** Sets the verified "From" address for the email. */
    public void setFromEmailAddress(String fromEmailAddress) {
        this.fromEmailAddress = fromEmailAddress;
    }

    /** Returns the verified "From" address for the email. */
    public String getFromEmailAddress() {
        return fromEmailAddress;
    }

    /** Fluent variant of {@link #setFromEmailAddress(String)}. */
    public SendEmailRequest withFromEmailAddress(String fromEmailAddress) {
        this.fromEmailAddress = fromEmailAddress;
        return this;
    }

    /** Sets the recipients of the email message. */
    public void setDestination(Destination destination) {
        this.destination = destination;
    }

    /** Returns the recipients of the email message. */
    public Destination getDestination() {
        return destination;
    }

    /** Fluent variant of {@link #setDestination(Destination)}. */
    public SendEmailRequest withDestination(Destination destination) {
        this.destination = destination;
        return this;
    }

    /** Returns the "Reply-to" addresses for the message. */
    public java.util.List<String> getReplyToAddresses() {
        return replyToAddresses;
    }

    /** Sets the "Reply-to" addresses; a defensive copy of the collection is stored. */
    public void setReplyToAddresses(java.util.Collection<String> replyToAddresses) {
        this.replyToAddresses = (replyToAddresses == null)
                ? null
                : new java.util.ArrayList<String>(replyToAddresses);
    }

    /**
     * Appends the given "Reply-to" addresses to any already set. Use
     * {@link #setReplyToAddresses(java.util.Collection)} or {@link #withReplyToAddresses(java.util.Collection)} to
     * replace the existing values instead.
     */
    public SendEmailRequest withReplyToAddresses(String... replyToAddresses) {
        if (this.replyToAddresses == null) {
            this.replyToAddresses = new java.util.ArrayList<String>(replyToAddresses.length);
        }
        java.util.Collections.addAll(this.replyToAddresses, replyToAddresses);
        return this;
    }

    /** Fluent variant of {@link #setReplyToAddresses(java.util.Collection)}; replaces existing values. */
    public SendEmailRequest withReplyToAddresses(java.util.Collection<String> replyToAddresses) {
        setReplyToAddresses(replyToAddresses);
        return this;
    }

    /** Sets the address that receives bounce and complaint notifications. */
    public void setFeedbackForwardingEmailAddress(String feedbackForwardingEmailAddress) {
        this.feedbackForwardingEmailAddress = feedbackForwardingEmailAddress;
    }

    /** Returns the address that receives bounce and complaint notifications. */
    public String getFeedbackForwardingEmailAddress() {
        return feedbackForwardingEmailAddress;
    }

    /** Fluent variant of {@link #setFeedbackForwardingEmailAddress(String)}. */
    public SendEmailRequest withFeedbackForwardingEmailAddress(String feedbackForwardingEmailAddress) {
        this.feedbackForwardingEmailAddress = feedbackForwardingEmailAddress;
        return this;
    }

    /** Sets the message body (Simple or Raw). */
    public void setContent(EmailContent content) {
        this.content = content;
    }

    /** Returns the message body (Simple or Raw). */
    public EmailContent getContent() {
        return content;
    }

    /** Fluent variant of {@link #setContent(EmailContent)}. */
    public SendEmailRequest withContent(EmailContent content) {
        this.content = content;
        return this;
    }

    /** Returns the tags applied to the email. */
    public java.util.List<MessageTag> getEmailTags() {
        return emailTags;
    }

    /** Sets the tags applied to the email; a defensive copy of the collection is stored. */
    public void setEmailTags(java.util.Collection<MessageTag> emailTags) {
        this.emailTags = (emailTags == null)
                ? null
                : new java.util.ArrayList<MessageTag>(emailTags);
    }

    /**
     * Appends the given tags to any already set. Use {@link #setEmailTags(java.util.Collection)} or
     * {@link #withEmailTags(java.util.Collection)} to replace the existing values instead.
     */
    public SendEmailRequest withEmailTags(MessageTag... emailTags) {
        if (this.emailTags == null) {
            this.emailTags = new java.util.ArrayList<MessageTag>(emailTags.length);
        }
        java.util.Collections.addAll(this.emailTags, emailTags);
        return this;
    }

    /** Fluent variant of {@link #setEmailTags(java.util.Collection)}; replaces existing values. */
    public SendEmailRequest withEmailTags(java.util.Collection<MessageTag> emailTags) {
        setEmailTags(emailTags);
        return this;
    }

    /** Sets the configuration set name used when sending the email. */
    public void setConfigurationSetName(String configurationSetName) {
        this.configurationSetName = configurationSetName;
    }

    /** Returns the configuration set name used when sending the email. */
    public String getConfigurationSetName() {
        return configurationSetName;
    }

    /** Fluent variant of {@link #setConfigurationSetName(String)}. */
    public SendEmailRequest withConfigurationSetName(String configurationSetName) {
        this.configurationSetName = configurationSetName;
        return this;
    }

    /**
     * Returns a string representation of this object for testing and debugging. Sensitive data is redacted with a
     * placeholder value.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getFromEmailAddress() != null)
            sb.append("FromEmailAddress: ").append(getFromEmailAddress()).append(",");
        if (getDestination() != null)
            sb.append("Destination: ").append(getDestination()).append(",");
        if (getReplyToAddresses() != null)
            sb.append("ReplyToAddresses: ").append(getReplyToAddresses()).append(",");
        if (getFeedbackForwardingEmailAddress() != null)
            sb.append("FeedbackForwardingEmailAddress: ").append(getFeedbackForwardingEmailAddress()).append(",");
        if (getContent() != null)
            sb.append("Content: ").append(getContent()).append(",");
        if (getEmailTags() != null)
            sb.append("EmailTags: ").append(getEmailTags()).append(",");
        if (getConfigurationSetName() != null)
            sb.append("ConfigurationSetName: ").append(getConfigurationSetName());
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof SendEmailRequest)) {
            return false;
        }
        SendEmailRequest other = (SendEmailRequest) obj;
        return java.util.Objects.equals(getFromEmailAddress(), other.getFromEmailAddress())
                && java.util.Objects.equals(getDestination(), other.getDestination())
                && java.util.Objects.equals(getReplyToAddresses(), other.getReplyToAddresses())
                && java.util.Objects.equals(getFeedbackForwardingEmailAddress(), other.getFeedbackForwardingEmailAddress())
                && java.util.Objects.equals(getContent(), other.getContent())
                && java.util.Objects.equals(getEmailTags(), other.getEmailTags())
                && java.util.Objects.equals(getConfigurationSetName(), other.getConfigurationSetName());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (null -> 0) as the
        // hand-rolled version this replaces, so hash values are unchanged.
        return java.util.Objects.hash(getFromEmailAddress(), getDestination(), getReplyToAddresses(),
                getFeedbackForwardingEmailAddress(), getContent(), getEmailTags(), getConfigurationSetName());
    }

    @Override
    public SendEmailRequest clone() {
        return (SendEmailRequest) super.clone();
    }

}
/*******************************************************************************
 *  Copyright FUJITSU LIMITED 2017
 *******************************************************************************/

package org.oscm.test.ejb;

import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;

import javax.ejb.ApplicationException;
import javax.ejb.EJBException;
import javax.ejb.EJBTransactionRequiredException;
import javax.ejb.EJBTransactionRolledbackException;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.transaction.Status;
import javax.transaction.Transaction;

/**
 * Wrapper for session bean instances to emulate EJB container behavior
 * regarding transaction handling and exceptions. See EJB 3.0 Core Spec, 16.6.2
 * and 14.3.1.
 *
 * <p>
 * Each {@code TX_*} constant is the handler for one
 * {@link TransactionAttributeType}; they are composed from the three internal
 * handlers (no tx / join existing tx / new tx) via {@link HandlerSwitch} and
 * {@link #suspend(IInvocationHandler)}. Declaration order of the static fields
 * matters: the {@code TX_*} constants and the {@code TYPE2HANDLERS} map are
 * initialized after the internal handlers they reference.
 *
 * @author hoffmann
 */
final class TransactionInvocationHandlers {

    /**
     * Internal handler in case we don't have to consider any transaction.
     * Application exceptions pass through unchanged; anything else is wrapped
     * into an {@link EJBException} (EJB 3.0, 14.3.1).
     */
    static final IInvocationHandler HANDLER_NOTX = new IInvocationHandler() {
        public Object call(Callable<Object> callable,
                IInvocationHandler.IInvocationCtx ctx) throws Exception {
            try {
                return callable.call();
            } catch (Exception ex) {
                if (ctx.isApplicationException(ex)) {
                    throw ex;
                } else {
                    throw setCause(new EJBException(ex));
                }
            }
        }
    };

    /**
     * Internal handler in case we work within a existing transaction.
     * System exceptions mark the caller's transaction rollback-only; an
     * application exception only does so when annotated with
     * {@code @ApplicationException(rollback = true)}.
     */
    static final IInvocationHandler HANDLER_WITHINTX = new IInvocationHandler() {
        public Object call(Callable<Object> callable,
                IInvocationHandler.IInvocationCtx ctx) throws Exception {
            try {
                return callable.call();
            } catch (Exception ex) {
                if (ctx.isApplicationException(ex)) {
                    if (hasRollbackAnnotation(ex)) {
                        ctx.getTransactionManager().setRollbackOnly();
                    }
                    throw ex;
                } else {
                    ctx.getTransactionManager().setRollbackOnly();
                    throw setCause(new EJBTransactionRolledbackException(
                            "Rollback due to exception.", ex));
                }
            }
        }
    };

    /**
     * Internal handler in case we need to create a new transaction.
     * Begins a transaction around the call; commits on success (unless the
     * bean marked rollback-only), rolls back on system exceptions and on
     * rollback-annotated application exceptions.
     */
    static final IInvocationHandler HANDLER_NEWTX = new IInvocationHandler() {
        public Object call(Callable<Object> callable,
                IInvocationHandler.IInvocationCtx ctx) throws Exception {
            final Object result;
            ctx.getTransactionManager().begin();
            try {
                result = callable.call();
            } catch (Exception ex) {
                if (ctx.isApplicationException(ex)) {
                    // Application exceptions commit by default; only a
                    // rollback annotation or an explicit setRollbackOnly()
                    // forces a rollback.
                    if (hasRollbackAnnotation(ex)
                            || ctx.getTransactionManager().getStatus() == Status.STATUS_MARKED_ROLLBACK) {
                        ctx.getTransactionManager().rollback();
                    } else {
                        commit(ctx);
                    }
                    throw ex;
                }
                ctx.getTransactionManager().rollback();
                throw setCause(new EJBException(ex));
            } catch (Error err) {
                // Required for AssertErrors thrown by JUnit4 assertions:
                ctx.getTransactionManager().rollback();
                throw err;
            }
            // Bean completed normally but asked for rollback.
            if (ctx.getTransactionManager().getStatus() == Status.STATUS_MARKED_ROLLBACK) {
                ctx.getTransactionManager().rollback();
                throw new EJBTransactionRolledbackException();
            }
            commit(ctx);
            return result;
        }

        // Commits the active transaction, translating commit failures into
        // EJBTransactionRolledbackException as a container would.
        private void commit(IInvocationHandler.IInvocationCtx ctx) {
            try {
                ctx.getTransactionManager().commit();
            } catch (Exception ex) {
                throw setCause(new EJBTransactionRolledbackException(
                        "Commit failed.", ex));
            }
        }
    };

    // Currently a pass-through hook; kept so all wrapping sites funnel through
    // one place.
    private static EJBException setCause(EJBException ex) {
        return ex;
    }

    // True when the exception type is annotated
    // @ApplicationException(rollback = true).
    private static boolean hasRollbackAnnotation(Exception ex) {
        ApplicationException annotation = ex.getClass().getAnnotation(
                ApplicationException.class);
        return annotation != null && annotation.rollback();
    }

    /**
     * Transaction mode NOT_SUPPORTED.
     */
    public static final IInvocationHandler TX_NOT_SUPPORTED = suspend(HANDLER_NOTX);

    /**
     * Transaction mode REQUIRED.
     */
    public static final IInvocationHandler TX_REQUIRED = new HandlerSwitch(
            HANDLER_WITHINTX, HANDLER_NEWTX);

    /**
     * Transaction mode SUPPORTS.
     */
    public static final IInvocationHandler TX_SUPPORTS = new HandlerSwitch(
            HANDLER_WITHINTX, HANDLER_NOTX);

    /**
     * Transaction mode REQUIRES_NEW.
     */
    public static final IInvocationHandler TX_REQUIRES_NEW = suspend(HANDLER_NEWTX);

    /**
     * Transaction mode MANDATORY.
     */
    public static final IInvocationHandler TX_MANDATORY = new HandlerSwitch(
            HANDLER_WITHINTX, new IInvocationHandler() {
                public Object call(Callable<Object> callable, IInvocationCtx ctx)
                        throws Exception {
                    throw new EJBTransactionRequiredException(
                            "Transaction required (MANDATORY).");
                }
            });

    /**
     * Transaction mode NEVER.
     */
    public static final IInvocationHandler TX_NEVER = new HandlerSwitch(
            new IInvocationHandler() {
                public Object call(Callable<Object> callable, IInvocationCtx ctx)
                        throws Exception {
                    throw new EJBException("Transaction not allowed (NEVER).");
                }
            }, HANDLER_NOTX);

    /**
     * Handler that delegates to one of two other handler depending whether a
     * transaction is active or not.
     */
    private static class HandlerSwitch implements IInvocationHandler {

        private final IInvocationHandler withTx, withoutTx;

        HandlerSwitch(IInvocationHandler withTx, IInvocationHandler withoutTx) {
            this.withTx = withTx;
            this.withoutTx = withoutTx;
        }

        public Object call(Callable<Object> callable, IInvocationCtx ctx)
                throws Exception {
            if (ctx.getTransactionManager().getStatus() == Status.STATUS_ACTIVE) {
                return withTx.call(callable, ctx);
            } else {
                return withoutTx.call(callable, ctx);
            }
        }
    }

    /**
     * Suspends the current transaction (if any) during the given handler is
     * called.
     *
     * @param delegate handler to run outside the caller's transaction
     * @return wrapping handler that suspends/resumes around the delegate
     */
    private static IInvocationHandler suspend(final IInvocationHandler delegate) {
        return new IInvocationHandler() {
            public Object call(Callable<Object> callable,
                    IInvocationHandler.IInvocationCtx ctx) throws Exception {
                final Transaction suspended = ctx.getTransactionManager()
                        .suspend();
                try {
                    return delegate.call(callable, ctx);
                } finally {
                    // Always restore the caller's transaction, even on failure.
                    if (suspended != null) {
                        ctx.getTransactionManager().resume(suspended);
                    }
                }
            }
        };
    }

    // Lookup table from attribute type to handler; populated once in the
    // static initializer below (must run after the TX_* constants).
    private static final Map<TransactionAttributeType, IInvocationHandler> TYPE2HANDLERS = new HashMap<TransactionAttributeType, IInvocationHandler>();

    static {
        TYPE2HANDLERS.put(TransactionAttributeType.NOT_SUPPORTED,
                TX_NOT_SUPPORTED);
        TYPE2HANDLERS.put(TransactionAttributeType.REQUIRED, TX_REQUIRED);
        TYPE2HANDLERS.put(TransactionAttributeType.SUPPORTS, TX_SUPPORTS);
        TYPE2HANDLERS.put(TransactionAttributeType.REQUIRES_NEW,
                TX_REQUIRES_NEW);
        TYPE2HANDLERS.put(TransactionAttributeType.MANDATORY, TX_MANDATORY);
        TYPE2HANDLERS.put(TransactionAttributeType.NEVER, TX_NEVER);
    }

    /**
     * Returns the handler suitable for the given transaction type.
     *
     * @param type transaction attribute type
     * @return matching handler
     */
    public static IInvocationHandler getHandlerFor(TransactionAttributeType type) {
        return TYPE2HANDLERS.get(type);
    }

    /**
     * Returns the handler suitable for the given bean implementation class.
     *
     * @param beanClass bean implementation class
     * @return handler for the class-level annotation, or TX_REQUIRED
     */
    public static IInvocationHandler getHandlerFor(Class<?> beanClass) {
        TransactionAttribute attr = beanClass
                .getAnnotation(TransactionAttribute.class);
        if (attr != null) {
            return getHandlerFor(attr.value());
        }
        // Default is REQUIRED (see EJB 3.0 Core Spec, 13.3.7)
        return TX_REQUIRED;
    }

    /**
     * Returns the handler suitable for the given bean implementation class and
     * method. A method-level annotation overrides the class-level one.
     *
     * @param beanClass bean implementation class
     * @param beanMethod invoked bean method
     * @return matching handler
     */
    public static IInvocationHandler getHandlerFor(Class<?> beanClass,
            Method beanMethod) {
        TransactionAttribute attr = beanMethod
                .getAnnotation(TransactionAttribute.class);
        if (attr != null) {
            return getHandlerFor(attr.value());
        }
        // Default is class scope:
        return getHandlerFor(beanClass);
    }

}
package org.jsondoc.core.doc;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.jsondoc.core.annotation.ApiObject;
import org.jsondoc.core.annotation.ApiObjectField;
import org.jsondoc.core.annotation.ApiVersion;
import org.jsondoc.core.pojo.ApiObjectDoc;
import org.jsondoc.core.pojo.ApiObjectFieldDoc;
import org.jsondoc.core.pojo.ApiStage;
import org.jsondoc.core.pojo.ApiVisibility;
import org.jsondoc.core.scanner.DefaultJSONDocScanner;
import org.jsondoc.core.scanner.JSONDocScanner;
import org.junit.Assert;
import org.junit.Test;

/**
 * Tests that {@link DefaultJSONDocScanner#getApiObjectDocs} correctly turns
 * annotated fixture classes (declared below) into {@link ApiObjectDoc}
 * instances: names, visibility/stage, version ranges, field types, enum
 * allowed values and field ordering.
 */
public class ApiObjectDocTest {

	private JSONDocScanner jsondocScanner = new DefaultJSONDocScanner();

	// Fixture exercising most ApiObjectField variants (14 annotated fields).
	@ApiObject(name="test-object", visibility = ApiVisibility.PUBLIC, stage = ApiStage.PRE_ALPHA)
	@ApiVersion(since = "1.0", until = "2.12")
	private class TestObject {

		@ApiObjectField(description="the test name", required = true)
		private String name;

		@ApiObjectField(description="the test age", required = false)
		private Integer age;

		@ApiObjectField(description="the test avg")
		private Long avg;

		@ApiObjectField(description="the test map")
		private Map<String, Integer> map;

		@SuppressWarnings("rawtypes")
		@ApiObjectField(description="an unparametrized list to test https://github.com/fabiomaffioletti/jsondoc/issues/5")
		private List unparametrizedList;

		@ApiObjectField(description="a parametrized list")
		private List<String> parametrizedList;

		@ApiObjectField(description="a wildcard parametrized list to test https://github.com/fabiomaffioletti/jsondoc/issues/5")
		private List<?> wildcardParametrized;

		@ApiObjectField(description="a Long array to test https://github.com/fabiomaffioletti/jsondoc/issues/27")
		private Long[] LongArray;

		@ApiObjectField(description="a long array to test https://github.com/fabiomaffioletti/jsondoc/issues/27")
		private long[] longArray;

		// NOTE(review): annotated required = true, yet testApiObjectDoc()
		// asserts getRequired() is "false" for this field — confirm which is
		// intended against the scanner implementation.
		@ApiObjectField(name = "foo_bar", description="a property to test https://github.com/fabiomaffioletti/jsondoc/pull/31", required = true)
		private String fooBar;

		@ApiObjectField(name = "version", description="a property to test version since and until", required = true)
		@ApiVersion(since = "1.0", until = "2.12")
		private String version;

		@ApiObjectField(name = "test-enum", description = "a test enum")
		private TestEnum testEnum;

		@ApiObjectField(name = "test-enum-with-allowed-values", description = "a test enum with allowed values", allowedvalues = { "A", "B", "C" })
		private TestEnum testEnumWithAllowedValues;

		@ApiObjectField(name = "orderedProperty", order = 1)
		private String orderedProperty;

	}

	// Enum fixture: its constants should become the doc's allowed values.
	@ApiObject(name = "test-enum")
	private enum TestEnum {
		TESTENUM1, TESTENUM2, TESTENUM3;
	}

	// Fixture without an explicit name: doc name should default from the
	// class name.
	@ApiObject
	private class NoNameApiObject {
		@ApiObjectField
		private Long id;
	}

	// Fixture used to verify the generated jsondoc template.
	@ApiObject
	private class TemplateApiObject {
		@ApiObjectField
		private Long id;

		@ApiObjectField
		private String name;
	}

	// Fixture with no visibility/stage given: both should be UNDEFINED.
	@ApiObject
	private class UndefinedVisibilityAndStage {

	}

	@Test
	public void testUndefinedVisibilityAndStageDoc() {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		classes.add(UndefinedVisibilityAndStage.class);

		ApiObjectDoc apiObjectDoc = jsondocScanner.getApiObjectDocs(classes).iterator().next();
		Assert.assertEquals("undefinedvisibilityandstage", apiObjectDoc.getName());
		Assert.assertEquals(ApiVisibility.UNDEFINED, apiObjectDoc.getVisibility());
		Assert.assertEquals(ApiStage.UNDEFINED, apiObjectDoc.getStage());
	}

	@Test
	public void testTemplateApiObjectDoc() {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		classes.add(TemplateApiObject.class);

		ApiObjectDoc apiObjectDoc = jsondocScanner.getApiObjectDocs(classes).iterator().next();
		Assert.assertEquals("templateapiobject", apiObjectDoc.getName());
		// Fields are expected in declaration order here.
		Iterator<ApiObjectFieldDoc> iterator = apiObjectDoc.getFields().iterator();
		Assert.assertEquals("id", iterator.next().getName());
		Assert.assertEquals("name", iterator.next().getName());
		Assert.assertNotNull(apiObjectDoc.getJsondocTemplate());
	}

	@Test
	public void testNoNameApiObjectDoc() {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		classes.add(NoNameApiObject.class);

		ApiObjectDoc apiObjectDoc = jsondocScanner.getApiObjectDocs(classes).iterator().next();
		Assert.assertEquals("nonameapiobject", apiObjectDoc.getName());
		Assert.assertEquals("id", apiObjectDoc.getFields().iterator().next().getName());
		Assert.assertEquals(1, apiObjectDoc.getJsondochints().size());
	}

	@Test
	public void testEnumObjectDoc() {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		classes.add(TestEnum.class);

		ApiObjectDoc childDoc = jsondocScanner.getApiObjectDocs(classes).iterator().next();
		Assert.assertEquals("test-enum", childDoc.getName());
		// Enums contribute no fields, only allowed values (constant names).
		Assert.assertEquals(0, childDoc.getFields().size());
		Assert.assertEquals(TestEnum.TESTENUM1.name(), childDoc.getAllowedvalues()[0]);
		Assert.assertEquals(TestEnum.TESTENUM2.name(), childDoc.getAllowedvalues()[1]);
		Assert.assertEquals(TestEnum.TESTENUM3.name(), childDoc.getAllowedvalues()[2]);
	}

	@Test
	public void testApiObjectDoc() {
		Set<Class<?>> classes = new HashSet<Class<?>>();
		classes.add(TestObject.class);

		ApiObjectDoc childDoc = jsondocScanner.getApiObjectDocs(classes).iterator().next();
		Assert.assertEquals("test-object", childDoc.getName());
		// 14 = number of @ApiObjectField-annotated fields on TestObject.
		Assert.assertEquals(14, childDoc.getFields().size());
		Assert.assertEquals("1.0", childDoc.getSupportedversions().getSince());
		Assert.assertEquals("2.12", childDoc.getSupportedversions().getUntil());
		Assert.assertEquals(ApiVisibility.PUBLIC, childDoc.getVisibility());
		Assert.assertEquals(ApiStage.PRE_ALPHA, childDoc.getStage());

		for (ApiObjectFieldDoc fieldDoc : childDoc.getFields()) {
			if(fieldDoc.getName().equals("wildcardParametrized")) {
				Assert.assertEquals("list", fieldDoc.getJsondocType().getType().get(0));
			}

			if(fieldDoc.getName().equals("unparametrizedList")) {
				Assert.assertEquals("list", fieldDoc.getJsondocType().getType().get(0));
			}

			if(fieldDoc.getName().equals("parametrizedList")) {
				Assert.assertEquals("list of string", fieldDoc.getJsondocType().getOneLineText());
			}

			if(fieldDoc.getName().equals("name")) {
				Assert.assertEquals("string", fieldDoc.getJsondocType().getType().get(0));
				Assert.assertEquals("name", fieldDoc.getName());
				Assert.assertEquals("true", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("age")) {
				Assert.assertEquals("integer", fieldDoc.getJsondocType().getType().get(0));
				Assert.assertEquals("age", fieldDoc.getName());
				Assert.assertEquals("false", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("avg")) {
				Assert.assertEquals("long", fieldDoc.getJsondocType().getType().get(0));
				Assert.assertEquals("avg", fieldDoc.getName());
				Assert.assertEquals("false", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("map")) {
				Assert.assertEquals("map", fieldDoc.getJsondocType().getType().get(0));
				Assert.assertEquals("string", fieldDoc.getJsondocType().getMapKey().getType().get(0));
				Assert.assertEquals("integer", fieldDoc.getJsondocType().getMapValue().getType().get(0));
			}

			// Boxed and primitive arrays should document identically.
			if(fieldDoc.getName().equals("LongArray")) {
				Assert.assertEquals("array of long", fieldDoc.getJsondocType().getOneLineText());
				Assert.assertEquals("LongArray", fieldDoc.getName());
				Assert.assertEquals("false", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("longArray")) {
				Assert.assertEquals("array of long", fieldDoc.getJsondocType().getOneLineText());
				Assert.assertEquals("longArray", fieldDoc.getName());
				Assert.assertEquals("false", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("fooBar")) {
				Assert.assertEquals("string", fieldDoc.getJsondocType().getOneLineText());
				Assert.assertEquals("foo_bar", fieldDoc.getName());
				Assert.assertEquals("false", fieldDoc.getRequired());
			}

			if(fieldDoc.getName().equals("version")) {
				Assert.assertEquals("string", fieldDoc.getJsondocType().getOneLineText());
				Assert.assertEquals("1.0", fieldDoc.getSupportedversions().getSince());
				Assert.assertEquals("2.12", fieldDoc.getSupportedversions().getUntil());
			}

			if(fieldDoc.getName().equals("test-enum")) {
				Assert.assertEquals("test-enum", fieldDoc.getName());
				Assert.assertEquals(TestEnum.TESTENUM1.name(), fieldDoc.getAllowedvalues()[0]);
				Assert.assertEquals(TestEnum.TESTENUM2.name(), fieldDoc.getAllowedvalues()[1]);
				Assert.assertEquals(TestEnum.TESTENUM3.name(), fieldDoc.getAllowedvalues()[2]);
			}

			// Explicit allowedvalues on the annotation win over enum constants.
			if(fieldDoc.getName().equals("test-enum-with-allowed-values")) {
				Assert.assertEquals("A", fieldDoc.getAllowedvalues()[0]);
				Assert.assertEquals("B", fieldDoc.getAllowedvalues()[1]);
				Assert.assertEquals("C", fieldDoc.getAllowedvalues()[2]);
			}

			// The else branch applies to every field except orderedProperty:
			// fields without an explicit order default to Integer.MAX_VALUE.
			if(fieldDoc.getName().equals("orderedProperty")) {
				Assert.assertEquals("orderedProperty", fieldDoc.getName());
				Assert.assertEquals(1, fieldDoc.getOrder().intValue());
			} else {
				Assert.assertEquals(Integer.MAX_VALUE, fieldDoc.getOrder().intValue());
			}
		}
	}

}
package ar.edu.unc.famaf.redditreader.ui;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.TextUtils;
import android.view.KeyEvent;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.inputmethod.EditorInfo;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;

import java.util.List;

import ar.edu.unc.famaf.redditreader.R;

/**
 * A login screen that offers login via email/password.
 *
 * <p>Validation happens in {@link #attemptLogin()}; the (simulated)
 * authentication runs off the UI thread in {@link UserLoginTask}.
 */
public class LoginActivity extends AppCompatActivity {

    /**
     * Keep track of the login task to ensure we can cancel it if requested.
     * Non-null exactly while an attempt is in flight (also used as a guard
     * against launching a second concurrent attempt).
     */
    private UserLoginTask mAuthTask = null;

    // UI references.
    private AutoCompleteTextView mEmailView;
    private EditText mPasswordView;
    private View mProgressView;
    private View mLoginFormView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        // Set up the login form.
        mEmailView = (AutoCompleteTextView) findViewById(R.id.email);

        mPasswordView = (EditText) findViewById(R.id.password);
        // Submit when the keyboard's action button (or IME_NULL) fires on the
        // password field.
        mPasswordView.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView textView, int id, KeyEvent keyEvent) {
                if (id == R.id.login || id == EditorInfo.IME_NULL) {
                    attemptLogin();
                    return true;
                }
                return false;
            }
        });

        Button mEmailSignInButton = (Button) findViewById(R.id.email_sign_in_button);
        mEmailSignInButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View view) {
                attemptLogin();
            }
        });

        mLoginFormView = findViewById(R.id.login_form);
        mProgressView = findViewById(R.id.login_progress);
    }

    /**
     * Attempts to sign in or register the account specified by the login form.
     * If there are form errors (invalid email, missing fields, etc.), the
     * errors are presented and no actual login attempt is made.
     */
    private void attemptLogin() {
        // Bail out if an attempt is already running.
        if (mAuthTask != null) {
            return;
        }

        // Reset errors.
        mEmailView.setError(null);
        mPasswordView.setError(null);

        // Store values at the time of the login attempt.
        String email = mEmailView.getText().toString();
        String password = mPasswordView.getText().toString();

        boolean cancel = false;
        View focusView = null;

        // Check for a valid password, if the user entered one.
        if (!TextUtils.isEmpty(password) && !isPasswordValid(password)) {
            mPasswordView.setError(getString(R.string.error_invalid_password));
            focusView = mPasswordView;
            cancel = true;
        }

        // Check for a valid email address. (Checked last so the email field,
        // being first in the form, wins the focus when both are invalid.)
        if (TextUtils.isEmpty(email)) {
            mEmailView.setError(getString(R.string.error_field_required));
            focusView = mEmailView;
            cancel = true;
        } else if (!isEmailValid(email)) {
            mEmailView.setError(getString(R.string.error_invalid_email));
            focusView = mEmailView;
            cancel = true;
        }

        if (cancel) {
            // There was an error; don't attempt login and focus the first
            // form field with an error.
            focusView.requestFocus();
        } else {
            // Show a progress spinner, and kick off a background task to
            // perform the user login attempt.
            showProgress(true);
            mAuthTask = new UserLoginTask(email, password);
            mAuthTask.execute((Void) null);
        }
    }

    // Placeholder validity check: any address containing '@' passes.
    private boolean isEmailValid(String email) {
        //TODO: Replace this with your own logic
        return email.contains("@");
    }

    // Placeholder validity check: passwords longer than 4 characters pass.
    private boolean isPasswordValid(String password) {
        //TODO: Replace this with your own logic
        return password.length() > 4;
    }

    /**
     * Shows the progress UI and hides the login form.
     *
     * @param show true to show the spinner and hide the form, false for the
     *             reverse
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2)
    private void showProgress(final boolean show) {
        // On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow
        // for very easy animations. If available, use these APIs to fade-in
        // the progress spinner.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
            int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime);

            // Cross-fade: form fades out while the spinner fades in (or vice
            // versa); visibility is finalized in onAnimationEnd.
            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
            mLoginFormView.animate().setDuration(shortAnimTime).alpha(
                    show ? 0 : 1).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
                }
            });

            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mProgressView.animate().setDuration(shortAnimTime).alpha(
                    show ? 1 : 0).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
                }
            });
        } else {
            // The ViewPropertyAnimator APIs are not available, so simply show
            // and hide the relevant UI components.
            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
        }
    }

    // NOTE(review): declared but never called from the visible code — kept
    // as-is in case template callers elsewhere use it.
    private void addEmailsToAutoComplete(List<String> emailAddressCollection) {
        //Create adapter to tell the AutoCompleteTextView what to show in its dropdown list.
        ArrayAdapter<String> adapter =
                new ArrayAdapter<>(LoginActivity.this,
                        android.R.layout.simple_dropdown_item_1line, emailAddressCollection);

        mEmailView.setAdapter(adapter);
    }

    /**
     * Represents an asynchronous login/registration task used to authenticate
     * the user.
     */
    public class UserLoginTask extends AsyncTask<Void, Void, Boolean> {

        private final String mEmail;
        private final String mPassword;

        UserLoginTask(String email, String password) {
            mEmail = email;
            mPassword = password;
        }

        @Override
        protected Boolean doInBackground(Void... params) {
            // TODO: attempt authentication against a network service.

            try {
                // Simulate network access.
                Thread.sleep(2000);
            } catch (InterruptedException e) {
                return false;
            }

            // TODO: register the new account here.
            return true;
        }

        @Override
        protected void onPostExecute(final Boolean success) {
            // Clear the in-flight marker before touching the UI.
            mAuthTask = null;
            showProgress(false);

            if (success) {
                finish();
            } else {
                mPasswordView.setError(getString(R.string.error_incorrect_password));
                mPasswordView.requestFocus();
            }
        }

        @Override
        protected void onCancelled() {
            mAuthTask = null;
            showProgress(false);
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.search;

import com.carrotsearch.hppc.DoubleOpenHashSet;
import com.carrotsearch.hppc.LongOpenHashSet;
import com.carrotsearch.hppc.ObjectOpenHashSet;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.*;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.indices.fielddata.breaker.DummyCircuitBreakerService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.hamcrest.CoreMatchers.equalTo;

/**
 * Tests for {@code FieldDataTermsFilter} over an in-memory index of 10 docs
 * ("str0".."str9" / 0..9 as long and double). Each test builds a filter from a
 * terms set, collects matching doc ids into a {@link FixedBitSet} and checks
 * the result against the expected doc list; type-mismatched and empty-terms
 * filters must match nothing (null DocIdSet or empty result).
 */
public class FieldDataTermsFilterTests extends ElasticsearchTestCase {

    protected IndexFieldDataService ifdService;
    protected IndexWriter writer;
    protected AtomicReader reader;
    protected StringFieldMapper strMapper;
    protected LongFieldMapper lngMapper;
    protected DoubleFieldMapper dblMapper;

    @Before
    public void setup() throws Exception {
        super.setUp();

        // setup field mappers
        strMapper = new StringFieldMapper.Builder("str_value")
                .build(new Mapper.BuilderContext(null, new ContentPath(1)));

        lngMapper = new LongFieldMapper.Builder("lng_value")
                .build(new Mapper.BuilderContext(null, new ContentPath(1)));

        dblMapper = new DoubleFieldMapper.Builder("dbl_value")
                .build(new Mapper.BuilderContext(null, new ContentPath(1)));

        // create index and fielddata service
        ifdService = new IndexFieldDataService(new Index("test"), new DummyCircuitBreakerService());
        writer = new IndexWriter(new RAMDirectory(),
                new IndexWriterConfig(Lucene.VERSION, new StandardAnalyzer(Lucene.VERSION)));

        // Doc i carries "str<i>", long i and double i, so a term value maps
        // directly onto its doc id in the assertions below.
        int numDocs = 10;
        for (int i = 0; i < numDocs; i++) {
            Document d = new Document();
            d.add(new StringField(strMapper.names().indexName(), "str" + i, Field.Store.NO));
            d.add(new LongField(lngMapper.names().indexName(), i, Field.Store.NO));
            d.add(new DoubleField(dblMapper.names().indexName(), Double.valueOf(i), Field.Store.NO));
            writer.addDocument(d);
        }

        // Single-segment view so reader.getContext() covers the whole index.
        reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(writer, true));
    }

    @After
    public void tearDown() throws Exception {
        super.tearDown();
        reader.close();
        writer.close();
        ifdService.clear();
        SearchContext.removeCurrent();
    }

    // Convenience wrapper to get typed field data for a mapper.
    protected <IFD extends IndexFieldData> IFD getFieldData(FieldMapper fieldMapper) {
        return ifdService.getForField(fieldMapper);
    }

    protected <IFD extends IndexNumericFieldData> IFD getFieldData(NumberFieldMapper fieldMapper) {
        return ifdService.getForField(fieldMapper);
    }

    @Test
    public void testBytes() throws Exception {
        List<Integer> docs = Arrays.asList(1, 5, 7);

        // hTerms feeds the filter directly; cTerms feeds the mapper's
        // termsFilter() path — both should select the same docs.
        ObjectOpenHashSet<BytesRef> hTerms = new ObjectOpenHashSet<BytesRef>();
        List<BytesRef> cTerms = new ArrayList<BytesRef>(docs.size());
        for (int i = 0; i < docs.size(); i++) {
            BytesRef term = new BytesRef("str" + docs.get(i));
            hTerms.add(term);
            cTerms.add(term);
        }

        FieldDataTermsFilter hFilter = FieldDataTermsFilter.newBytes(getFieldData(strMapper), hTerms);

        int size = reader.maxDoc();
        FixedBitSet result = new FixedBitSet(size);

        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        // filter from mapper
        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(strMapper.termsFilter(ifdService, cTerms, null)
                .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));

        // filter on a numeric field using BytesRef terms
        // should not match any docs
        hFilter = FieldDataTermsFilter.newBytes(getFieldData(lngMapper), hTerms);
        result.or(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(0));

        // filter on a numeric field using BytesRef terms
        // should not match any docs
        hFilter = FieldDataTermsFilter.newBytes(getFieldData(dblMapper), hTerms);
        result.or(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(0));
    }

    @Test
    public void testLongs() throws Exception {
        List<Integer> docs = Arrays.asList(1, 5, 7);

        LongOpenHashSet hTerms = new LongOpenHashSet();
        List<Long> cTerms = new ArrayList<Long>(docs.size());
        for (int i = 0; i < docs.size(); i++) {
            long term = docs.get(i).longValue();
            hTerms.add(term);
            cTerms.add(term);
        }

        FieldDataTermsFilter hFilter = FieldDataTermsFilter.newLongs(getFieldData(lngMapper), hTerms);

        int size = reader.maxDoc();
        FixedBitSet result = new FixedBitSet(size);

        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        // filter from mapper
        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(lngMapper.termsFilter(ifdService, cTerms, null)
                .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        // Long terms against a double field: expected to yield no DocIdSet.
        hFilter = FieldDataTermsFilter.newLongs(getFieldData(dblMapper), hTerms);
        assertNull(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
    }

    @Test
    public void testDoubles() throws Exception {
        List<Integer> docs = Arrays.asList(1, 5, 7);

        DoubleOpenHashSet hTerms = new DoubleOpenHashSet();
        List<Double> cTerms = new ArrayList<Double>(docs.size());
        for (int i = 0; i < docs.size(); i++) {
            double term = Double.valueOf(docs.get(i));
            hTerms.add(term);
            cTerms.add(term);
        }

        FieldDataTermsFilter hFilter = FieldDataTermsFilter.newDoubles(getFieldData(dblMapper), hTerms);

        int size = reader.maxDoc();
        FixedBitSet result = new FixedBitSet(size);

        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        // filter from mapper
        result.clear(0, size);
        assertThat(result.cardinality(), equalTo(0));
        result.or(dblMapper.termsFilter(ifdService, cTerms, null)
                .getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
        assertThat(result.cardinality(), equalTo(docs.size()));
        for (int i = 0; i < reader.maxDoc(); i++) {
            assertThat(result.get(i), equalTo(docs.contains(i)));
        }

        // Double terms against a long field: expected to yield no DocIdSet.
        hFilter = FieldDataTermsFilter.newDoubles(getFieldData(lngMapper), hTerms);
        assertNull(hFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
    }

    @Test
    public void testNoTerms() throws Exception {
        // Empty term sets of every flavor must produce no DocIdSet at all.
        FieldDataTermsFilter hFilterBytes = FieldDataTermsFilter.newBytes(getFieldData(strMapper), new ObjectOpenHashSet<BytesRef>());
        FieldDataTermsFilter hFilterLongs = FieldDataTermsFilter.newLongs(getFieldData(lngMapper), new LongOpenHashSet());
        FieldDataTermsFilter hFilterDoubles = FieldDataTermsFilter.newDoubles(getFieldData(dblMapper), new DoubleOpenHashSet());

        assertNull(hFilterBytes.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
        assertNull(hFilterLongs.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
        assertNull(hFilterDoubles.getDocIdSet(reader.getContext(), reader.getLiveDocs()));
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.server;

import com.facebook.presto.SessionRepresentation;
import com.facebook.presto.execution.QueryInfo;
import com.facebook.presto.execution.QueryState;
import com.facebook.presto.operator.BlockedReason;
import com.facebook.presto.spi.ErrorCode;
import com.facebook.presto.spi.ErrorType;
import com.facebook.presto.spi.QueryId;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableSet;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import org.joda.time.DateTime;

import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

import java.net.URI;
import java.util.Set;

import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;

/**
 * Lightweight, immutable, JSON-serializable summary of a query: identity,
 * state, timing, memory and driver counts. Can be built either field-by-field
 * or from a full {@link QueryInfo}.
 */
@Immutable
public class BasicQueryInfo
{
    private final QueryId queryId;
    private final SessionRepresentation session;
    private final QueryState state;
    // errorType/errorCode may be null when the query did not fail
    private final ErrorType errorType;
    private final ErrorCode errorCode;
    private final boolean scheduled;
    private final boolean fullyBlocked;
    private final Set<BlockedReason> blockedReasons;
    private final URI self;
    private final String query;
    private final Duration elapsedTime;
    private final Duration executionTime;
    private final Duration cpuTime;
    private final DateTime endTime;
    private final DateTime createTime;
    private final DataSize currentMemory;
    private final DataSize peakMemory;
    private final double cumulativeMemory;
    private final int runningDrivers;
    private final int queuedDrivers;
    private final int completedDrivers;
    private final int totalDrivers;

    /**
     * Field-by-field constructor. Required references are null-checked;
     * driver counts must be non-negative; blockedReasons is defensively copied.
     */
    public BasicQueryInfo(
            QueryId queryId,
            SessionRepresentation session,
            QueryState state,
            ErrorType errorType,
            ErrorCode errorCode,
            boolean scheduled,
            boolean fullyBlocked,
            Set<BlockedReason> blockedReasons,
            URI self,
            String query,
            Duration elapsedTime,
            Duration executionTime,
            Duration cpuTime,
            DateTime endTime,
            DateTime createTime,
            DataSize currentMemory,
            DataSize peakMemory,
            double cumulativeMemory,
            int runningDrivers,
            int queuedDrivers,
            int completedDrivers,
            int totalDrivers)
    {
        this.queryId = requireNonNull(queryId, "queryId is null");
        this.session = requireNonNull(session, "session is null");
        this.state = requireNonNull(state, "state is null");
        this.errorType = errorType;
        this.errorCode = errorCode;
        this.scheduled = scheduled;
        this.fullyBlocked = fullyBlocked;
        this.blockedReasons = ImmutableSet.copyOf(requireNonNull(blockedReasons, "blockedReasons is null"));
        this.self = requireNonNull(self, "self is null");
        this.query = requireNonNull(query, "query is null");
        this.elapsedTime = elapsedTime;
        this.executionTime = executionTime;
        this.cpuTime = cpuTime;
        this.endTime = endTime;
        this.createTime = createTime;
        this.currentMemory = currentMemory;
        this.peakMemory = peakMemory;
        this.cumulativeMemory = cumulativeMemory;

        checkArgument(runningDrivers >= 0, "runningDrivers is less than zero");
        this.runningDrivers = runningDrivers;
        checkArgument(queuedDrivers >= 0, "queuedDrivers is less than zero");
        this.queuedDrivers = queuedDrivers;
        checkArgument(completedDrivers >= 0, "completedDrivers is less than zero");
        this.completedDrivers = completedDrivers;
        checkArgument(totalDrivers >= 0, "totalDrivers is less than zero");
        this.totalDrivers = totalDrivers;
    }

    /** Projects a full {@link QueryInfo} (and its query stats) onto this summary. */
    public BasicQueryInfo(QueryInfo queryInfo)
    {
        this(queryInfo.getQueryId(),
                queryInfo.getSession(),
                queryInfo.getState(),
                queryInfo.getErrorType(),
                queryInfo.getErrorCode(),
                queryInfo.isScheduled(),
                queryInfo.getQueryStats().isFullyBlocked(),
                queryInfo.getQueryStats().getBlockedReasons(),
                queryInfo.getSelf(),
                queryInfo.getQuery(),
                queryInfo.getQueryStats().getElapsedTime(),
                queryInfo.getQueryStats().getExecutionTime(),
                queryInfo.getQueryStats().getTotalCpuTime(),
                queryInfo.getQueryStats().getEndTime(),
                queryInfo.getQueryStats().getCreateTime(),
                queryInfo.getQueryStats().getTotalMemoryReservation(),
                queryInfo.getQueryStats().getPeakMemoryReservation(),
                queryInfo.getQueryStats().getCumulativeMemory(),
                queryInfo.getQueryStats().getRunningDrivers(),
                queryInfo.getQueryStats().getQueuedDrivers(),
                queryInfo.getQueryStats().getCompletedDrivers(),
                queryInfo.getQueryStats().getTotalDrivers());
    }

    @JsonProperty
    public QueryId getQueryId()
    {
        return queryId;
    }

    @JsonProperty
    public SessionRepresentation getSession()
    {
        return session;
    }

    @JsonProperty
    public QueryState getState()
    {
        return state;
    }

    @Nullable
    @JsonProperty
    public ErrorType getErrorType()
    {
        return errorType;
    }

    @Nullable
    @JsonProperty
    public ErrorCode getErrorCode()
    {
        return errorCode;
    }

    @JsonProperty
    public boolean isScheduled()
    {
        return scheduled;
    }

    @JsonProperty
    public boolean isFullyBlocked()
    {
        return fullyBlocked;
    }

    @JsonProperty
    public Set<BlockedReason> getBlockedReasons()
    {
        return blockedReasons;
    }

    @JsonProperty
    public URI getSelf()
    {
        return self;
    }

    @JsonProperty
    public String getQuery()
    {
        return query;
    }

    // durations and memory are serialized as primitive millis/bytes, not objects

    @JsonProperty
    public long getExecutionTimeMillis()
    {
        return executionTime.toMillis();
    }

    @JsonProperty
    public long getCpuTimeMillis()
    {
        return cpuTime.toMillis();
    }

    @JsonProperty
    public long getElapsedTimeMillis()
    {
        return elapsedTime.toMillis();
    }

    @JsonProperty
    public DateTime getEndTime()
    {
        return endTime;
    }

    @JsonProperty
    public int getRunningDrivers()
    {
        return runningDrivers;
    }

    @JsonProperty
    public int getQueuedDrivers()
    {
        return queuedDrivers;
    }

    @JsonProperty
    public int getTotalDrivers()
    {
        return totalDrivers;
    }

    @JsonProperty
    public int getCompletedDrivers()
    {
        return completedDrivers;
    }

    @JsonProperty
    public DateTime getCreateTime()
    {
        return createTime;
    }

    @JsonProperty
    public double getCumulativeMemory()
    {
        return cumulativeMemory;
    }

    @JsonProperty
    public long getCurrentMemoryBytes()
    {
        return currentMemory.toBytes();
    }

    @JsonProperty
    public long getPeakMemoryBytes()
    {
        return peakMemory.toBytes();
    }

    @Override
    public String toString()
    {
        return toStringHelper(this)
                .add("queryId", queryId)
                .add("state", state)
                .toString();
    }
}
package psidev.psi.mi.jami.crosslink.io.parser;

import com.googlecode.jcsv.reader.CSVEntryParser;
import psidev.psi.mi.jami.crosslink.extension.*;
import psidev.psi.mi.jami.crosslink.listener.CsvParserListener;
import psidev.psi.mi.jami.crosslink.utils.CsvUtils;
import psidev.psi.mi.jami.datasource.DefaultFileSourceContext;
import psidev.psi.mi.jami.datasource.FileSourceContext;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.model.impl.DefaultExperiment;
import psidev.psi.mi.jami.model.impl.DefaultPosition;
import psidev.psi.mi.jami.utils.CvTermUtils;

import java.util.*;

/**
 * Abstract class for crosslink CSV parser
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>22/08/14</pre>
 */
public abstract class AbstractCsvInteractionEvidenceParser<T extends InteractionEvidence> implements CSVEntryParser<T> {

    // maps a CSV column index to the recognised column it contains;
    // null until initialiseColumnNames(...) has processed the header line
    private Map<Integer, CrossLinkCSVColumns> columnsIndex=null;
    // 1-based line counter, incremented for the header and for every parsed entry
    private int currentLineIndex=0;
    // optional listener notified of parsing errors; may be null
    private CsvParserListener parserListener;
    // true once parseEntry(...) has been called at least once
    private boolean isStarted = false;

    /**
     * <p>parseEntry.</p>
     *
     * Parses one CSV data row into an interaction evidence. Returns null when the
     * row is null or when the header has not been initialised yet (columnsIndex == null).
     * Recognised columns are collected first, then participants are built; when both
     * participants carry exactly one feature each, the two features are cross-linked.
     *
     * @param data a {@link java.lang.String} object.
     * @return a T object.
     */
    public T parseEntry(String... data) {
        isStarted = true;
        if (data == null){
            return null;
        }
        // increments current line index
        currentLineIndex++;
        // initialise columns
        if (columnsIndex == null) {
            return null;
        }
        // parse data
        else{
            String protein1 = null;
            String protein2 = null;
            String pepPos1 = null;
            String pepPos2 = null;
            String linkPos1 = null;
            String linkPos2 = null;
            int index = 0;
            // column index of each recognised value; -1 when absent
            int protein1Index = -1;
            int protein2Index = -1;
            int pepPos1Index = -1;
            int pepPos2Index = -1;
            int linkPos1Index = -1;
            int linkPos2Index = -1;
            String bait=null;

            // collect the raw cell values for every recognised column
            for (String value : data){
                // the column index is recognized
                if (this.columnsIndex.containsKey(index)){
                    CrossLinkCSVColumns colName = this.columnsIndex.get(index);
                    switch (colName){
                        case protein1:
                            protein1 = value;
                            protein1Index = index;
                            break;
                        case protein2:
                            protein2 = value;
                            protein2Index = index;
                            break;
                        case peppos1:
                            pepPos1 = value;
                            pepPos1Index = index;
                            break;
                        case peppos2:
                            pepPos2 = value;
                            pepPos2Index = index;
                            break;
                        case linkpos1:
                            linkPos1 = value;
                            linkPos1Index = index;
                            break;
                        case linkpos2:
                            linkPos2 = value;
                            linkPos2Index = index;
                            break;
                        case narygroup:
                            bait = value;
                            break;
                    }
                }
                index++;
            }

            T interaction = instantiateInteractionEvidence(currentLineIndex, bait);

            if (protein1 != null){
                ParticipantEvidence participant1 = createParticipantEvidence(protein1, protein1Index, pepPos1, pepPos1Index,
                        linkPos1, linkPos1Index);
                ParticipantEvidence participant2=null;
                if (protein2 != null){
                    participant2 = createParticipantEvidence(protein2, protein2Index, pepPos2, pepPos2Index,
                            linkPos2, linkPos2Index);
                }

                if (participant1 != null){
                    interaction.addParticipant(participant1);
                    if (participant2 != null){
                        interaction.addParticipant(participant2);
                        // link the two cross-linking features when unambiguous (one each)
                        if (participant1.getFeatures().size() == 1 && participant2.getFeatures().size() == 1){
                            FeatureEvidence f1 = participant1.getFeatures().iterator().next();
                            FeatureEvidence f2 = participant2.getFeatures().iterator().next();
                            f1.getLinkedFeatures().add(f2);
                            f2.getLinkedFeatures().add(f1);
                        }
                    }
                    // set interaction type
                    interaction.setInteractionType(CvTermUtils.createMICvTerm(CsvUtils.PHYSICAL_INTERACTION, CsvUtils.PHYSICAL_INTERACTION_MI));
                    // create experiment
                    Experiment exp = new DefaultExperiment(null);
                    // set interaction detection method
                    exp.setInteractionDetectionMethod(CvTermUtils.createMICvTerm(CsvUtils.CROSS_LINK, CsvUtils.CROSS_LINK_MI));
                    // set experiment
                    interaction.setExperiment(exp);
                    return interaction;
                }
            }
            else{
                processNoProtein1Error(currentLineIndex);
                return interaction;
            }
            return interaction;
        }
    }

    /**
     * <p>Getter for the field <code>parserListener</code>.</p>
     *
     * @return a {@link psidev.psi.mi.jami.crosslink.listener.CsvParserListener} object.
     */
    public CsvParserListener getParserListener() {
        return parserListener;
    }

    /**
     * <p>Setter for the field <code>parserListener</code>.</p>
     *
     * @param parserListener a {@link psidev.psi.mi.jami.crosslink.listener.CsvParserListener} object.
     */
    public void setParserListener(CsvParserListener parserListener) {
        this.parserListener = parserListener;
    }

    /**
     * <p>isStarted.</p>
     *
     * @return a boolean.
     */
    public boolean isStarted() {
        return isStarted;
    }

    /**
     * <p>createParticipantEvidence.</p>
     *
     * Builds a participant from one protein cell and its (optional) position cells.
     * A single protein with a single position yields a simple participant; several
     * proteins (or one protein with several positions) yield a participant pool;
     * a protein/position count mismatch is reported and yields null.
     *
     * @param protein1 a {@link java.lang.String} object.
     * @param protein1Index a int.
     * @param pepPos a {@link java.lang.String} object.
     * @param pepPos1Index a int.
     * @param linkedPos a {@link java.lang.String} object.
     * @param linkedPosIndex a int.
     * @return a {@link psidev.psi.mi.jami.model.ParticipantEvidence} object.
     */
    protected ParticipantEvidence createParticipantEvidence(String protein1, int protein1Index, String pepPos, int pepPos1Index,
                                                            String linkedPos, int linkedPosIndex) {
        // parse proteins
        List<CsvProtein> csvProteins1 = createProteinsFromString(protein1, currentLineIndex, protein1Index);
        List<CsvRange> positions = parseCrossLinkingFeatures(pepPos, linkedPos, currentLineIndex, pepPos1Index, linkedPosIndex);

        // check if we have more than one position and we need adjusting
        if (!positions.isEmpty()){
            // the same protein has different ranges and need to be duplicated in a participant set
            if (positions.size() > 1 && csvProteins1.size() == 1){
                CsvProtein firstProt = csvProteins1.iterator().next();
                // we duplicate the protein
                for (int i=1; i < positions.size(); i++){
                    csvProteins1.add(firstProt);
                }
            }
        }

        // parse participantEvidence
        ParticipantEvidence participantEvidence1 = null;
        // simple participant
        if (csvProteins1.size() == 1 && positions.size() == 1){
            participantEvidence1 = createParticipantEvidence(csvProteins1.iterator().next(), currentLineIndex, protein1Index,
                    positions.iterator().next());
        }
        // participant no linked features
        else if (csvProteins1.size() == 1 && positions.isEmpty()){
            participantEvidence1 = createParticipantEvidence(csvProteins1.iterator().next(), currentLineIndex, protein1Index,
                    null);
        }
        // we have an error, the number of positions does not match the number of proteins and we have ambiguous results
        else if (positions.size() > 0 && positions.size() != csvProteins1.size()){
            processMismatchProteinPositions(positions, csvProteins1);
        }
        // participant pool
        else {
            participantEvidence1 = createExperimentalParticipantPool(csvProteins1, currentLineIndex, protein1Index, positions);
        }

        return participantEvidence1;
    }

    /**
     * <p>parseCrossLinkingFeatures.</p>
     *
     * @param pepPos a {@link java.lang.String} object.
     * @param linkedPos a {@link java.lang.String} object.
     * @param lineNumber a int.
     * @param pepColumnNumber a int.
     * @param linkedColumnNumber a int.
* @return a {@link java.util.List} object. */ protected List<CsvRange> parseCrossLinkingFeatures(String pepPos, String linkedPos, int lineNumber, int pepColumnNumber, int linkedColumnNumber) { List<CsvRange> positions = Collections.EMPTY_LIST; // parse positions // the position is relative to the peptide if (pepPos != null && linkedPos != null){ List<CsvRange> peptidePositions = parsePositions(pepPos, lineNumber, pepColumnNumber); List<CsvRange> relativePositions = parsePositions(linkedPos, lineNumber, linkedColumnNumber); if (!peptidePositions.isEmpty() && !relativePositions.isEmpty()){ // the same protein has different ranges and need to be duplicated in a participant set if (relativePositions.size() > 1 && peptidePositions.size() == 1){ CsvRange firstPepPosition = peptidePositions.iterator().next(); // we duplicate the protein for (int i=1; i < relativePositions.size(); i++){ relativePositions.add(firstPepPosition); } } // we have an error, the number of positions does not match the number of peptide positions and we have ambiguous results else if (relativePositions.size() > 0 && relativePositions.size() != peptidePositions.size()){ processMismatchPeptidePositions(peptidePositions, relativePositions); } // compute final positions positions = new ArrayList<CsvRange>(relativePositions.size()); for (int i=0; i < relativePositions.size(); i++){ CsvRange finalRange = new CsvRange( new DefaultPosition(relativePositions.get(i).getStart().getStart() + peptidePositions.get(i).getStart().getStart()), new DefaultPosition(relativePositions.get(i).getEnd().getEnd() + peptidePositions.get(i).getEnd().getEnd())); finalRange.setSourceLocator(relativePositions.get(i).getSourceLocator()); positions.add(finalRange); } } else{ processMismatchPeptidePositions(peptidePositions, relativePositions); } } // the position is absolute else if (pepPos == null && linkedPos != null){ positions = parsePositions(linkedPos, lineNumber, linkedColumnNumber); } return positions; } /** * 
<p>parsePositions.</p> * * @param pos a {@link java.lang.String} object. * @param lineNumber a int. * @param colNumber a int. * @return a {@link java.util.List} object. */ protected List<CsvRange> parsePositions(String pos, int lineNumber, int colNumber){ // several ranges are present if (pos.contains(CsvUtils.PROTEIN_SEPARATOR)){ String[] ranges = pos.split(CsvUtils.PROTEIN_SEPARATOR); List<CsvRange> positions = new ArrayList<CsvRange>(ranges.length); for (String p : ranges){ try { Long posValue = Long.parseLong(p.trim()); CsvRange range = new CsvRange(new DefaultPosition(posValue), new DefaultPosition(posValue)); range.setSourceLocator(new CsvSourceLocator(lineNumber, -1, colNumber)); positions.add(range); } catch (NumberFormatException e){ processInvalidPosition("Invalid range positions: "+ e.getMessage(), new DefaultFileSourceContext(new CsvSourceLocator(lineNumber, -1, colNumber))); positions.add(null); return Collections.EMPTY_LIST; } } return positions; } // only one range else{ try { Long posValue = Long.parseLong(pos.trim()); CsvRange range = new CsvRange(new DefaultPosition(posValue), new DefaultPosition(posValue)); range.setSourceLocator(new CsvSourceLocator(lineNumber, -1, colNumber)); return Arrays.asList(range); } catch (NumberFormatException e){ processInvalidPosition("Invalid range positions: "+ e.getMessage(), new DefaultFileSourceContext(new CsvSourceLocator(lineNumber, -1, colNumber))); return Collections.EMPTY_LIST; } } } /** * <p>createParticipantEvidence.</p> * * @param csvProtein a {@link psidev.psi.mi.jami.crosslink.extension.CsvProtein} object. * @param lineNumber a int. * @param columnNumber a int. * @param range a {@link psidev.psi.mi.jami.crosslink.extension.CsvRange} object. * @return a {@link psidev.psi.mi.jami.crosslink.extension.CsvParticipantEvidence} object. 
 */
protected CsvParticipantEvidence createParticipantEvidence(CsvProtein csvProtein, int lineNumber, int columnNumber, CsvRange range) {
    CsvParticipantEvidence participant = new CsvParticipantEvidence(csvProtein);
    // init bio role
    participant.setBiologicalRole(CvTermUtils.createUnspecifiedRole());
    // init exp role
    participant.setExperimentalRole(CvTermUtils.createMICvTerm(Participant.NEUTRAL, Participant.NEUTRAL_MI));
    // init identification method
    participant.getIdentificationMethods().add(CvTermUtils.createMICvTerm(CsvUtils.SEQUENCE_TAG, CsvUtils.SEQUENCE_TAG_MI));
    participant.setSourceLocator(new CsvSourceLocator(lineNumber, -1, columnNumber));

    // range not null, create feature
    if (range != null){
        CsvFeatureEvidence featureEvidence = createCrossLinkFeatureEvidence(range);
        // add feature to participant
        participant.addFeature(featureEvidence);
    }
    return participant;
}

/**
 * <p>createCrossLinkFeatureEvidence.</p>
 *
 * Builds a cross-linker feature carrying the given range; the feature inherits
 * the range's source locator.
 *
 * @param range a {@link psidev.psi.mi.jami.crosslink.extension.CsvRange} object.
 * @return a {@link psidev.psi.mi.jami.crosslink.extension.CsvFeatureEvidence} object.
 */
protected CsvFeatureEvidence createCrossLinkFeatureEvidence(CsvRange range) {
    CsvFeatureEvidence featureEvidence = new CsvFeatureEvidence();
    // set type to crosslinker
    featureEvidence.setType(CvTermUtils.createMICvTerm(CsvUtils.CROSS_LINKER, CsvUtils.CROSS_LINKER_MI));
    // set source locator
    featureEvidence.setSourceLocator(range.getSourceLocator());
    // add range
    featureEvidence.getRanges().add(range);
    return featureEvidence;
}

/**
 * <p>createExperimentalParticipantPool.</p>
 *
 * Builds a participant pool of candidates, one per protein; csvRanges is either
 * empty (no features) or expected to pair with csvProteins index-by-index.
 *
 * @param csvProteins a {@link java.util.List} object.
 * @param lineNumber a int.
 * @param columnNumber a int.
 * @param csvRanges a {@link java.util.List} object.
 * @return a {@link psidev.psi.mi.jami.crosslink.extension.CsvExperimentalParticipantPool} object.
 */
protected CsvExperimentalParticipantPool createExperimentalParticipantPool(List<CsvProtein> csvProteins, int lineNumber, int columnNumber,
                                                                           List<CsvRange> csvRanges) {
    CsvExperimentalParticipantPool participant = new CsvExperimentalParticipantPool("interactor set "+lineNumber+"-"+columnNumber);
    // init bio role
    participant.setBiologicalRole(CvTermUtils.createUnspecifiedRole());
    // init exp role
    participant.setExperimentalRole(CvTermUtils.createMICvTerm(Participant.NEUTRAL, Participant.NEUTRAL_MI));
    // init identification method
    participant.getIdentificationMethods().add(CvTermUtils.createMICvTerm(CsvUtils.SEQUENCE_TAG, CsvUtils.SEQUENCE_TAG_MI));
    participant.setSourceLocator(new CsvSourceLocator(lineNumber, -1, columnNumber));

    for (int i = 0; i < csvProteins.size(); i++){
        CsvProtein prot = csvProteins.get(i);
        CsvRange range = csvRanges.isEmpty() ? null : csvRanges.get(i);
        CsvExperimentalParticipantCandidate candidate = new CsvExperimentalParticipantCandidate(prot);
        candidate.setSourceLocator(prot.getSourceLocator());
        participant.add(candidate);
        // range not null, create feature
        if (range != null){
            CsvFeatureEvidence featureEvidence = createCrossLinkFeatureEvidence(range);
            // add feature to participant
            candidate.addFeature(featureEvidence);
        }
    }
    return participant;
}

/**
 * <p>createProteinsFromString.</p>
 *
 * Splits a protein cell on the protein separator and parses each token;
 * tokens with an invalid identifier syntax are reported and skipped.
 *
 * @param protein1 a {@link java.lang.String} object.
 * @param lineNumber a int.
 * @param columnNumber a int.
 * @return a {@link java.util.List} object.
 */
protected List<CsvProtein> createProteinsFromString(String protein1, int lineNumber, int columnNumber){
    List<CsvProtein> proteins = new ArrayList<CsvProtein>();
    // several proteins are present
    if (protein1.contains(CsvUtils.PROTEIN_SEPARATOR)){
        String[] proteinIds = protein1.split(CsvUtils.PROTEIN_SEPARATOR);
        for (String proteinId : proteinIds){
            // we have identifier and name
            CsvProtein csvProtein = createProteinFromNameAndIdentifier(proteinId, lineNumber, columnNumber);
            if (csvProtein != null){
                proteins.add(csvProtein);
            }
        }
    }
    // only one entry
    else{
        // we have identifier and name
        CsvProtein csvProtein = createProteinFromNameAndIdentifier(protein1, lineNumber, columnNumber);
        if (csvProtein != null){
            proteins.add(csvProtein);
        }
    }
    return proteins;
}

/**
 * <p>createProteinFromNameAndIdentifier.</p>
 *
 * Parses one protein token. A token containing the xref separator is expected to
 * have exactly three parts (presumably db:accession:name - TODO confirm ordering
 * against the format spec); otherwise the whole token is used as both name and
 * uniprotkb identifier.
 *
 * @param protein a {@link java.lang.String} object.
 * @param lineNumber a int.
 * @param columnNumber a int.
 * @return a {@link psidev.psi.mi.jami.crosslink.extension.CsvProtein} object.
 */
protected CsvProtein createProteinFromNameAndIdentifier(String protein, int lineNumber, int columnNumber){
    // we have identifier and name
    if (protein.contains(CsvUtils.XREF_SEPARATOR)){
        String[] identifiers = protein.split("\\"+CsvUtils.XREF_SEPARATOR);
        if (identifiers.length != 3){
            processProteinIdentifiersError(identifiers, lineNumber, columnNumber);
            return null;
        }
        else{
            CsvProtein prot = new CsvProtein(identifiers[2], new CsvXref(CvTermUtils.createUniprotkbDatabase(),
                    identifiers[1].trim(), CvTermUtils.createIdentityQualifier()));
            prot.setSourceLocator(new CsvSourceLocator(lineNumber, -1, columnNumber));
            return prot;
        }
    }
    // the name and identifier will be the same
    else {
        CsvProtein prot = new CsvProtein(protein, new CsvXref(CvTermUtils.createUniprotkbDatabase(),
                protein.trim(), CvTermUtils.createIdentityQualifier()));
        prot.setSourceLocator(new CsvSourceLocator(lineNumber, -1, columnNumber));
        return prot;
    }
}

/**
 * <p>instantiateInteractionEvidence.</p>
 *
 * @param linePosition a int.
 * @param bait a {@link java.lang.String} object.
 * @return a T object.
 */
protected abstract T instantiateInteractionEvidence(int linePosition, String bait);

/**
 * <p>processMismatchPeptidePositions.</p>
 *
 * Forwards a peptide/linked position count mismatch to the listener, if any.
 *
 * @param peptidePositions a {@link java.util.List} object.
 * @param linkedPositions a {@link java.util.List} object.
 */
protected void processMismatchPeptidePositions(List<CsvRange> peptidePositions, List<CsvRange> linkedPositions){
    if (this.parserListener != null){
        this.parserListener.onMismatchBetweenPeptideAndLinkedPositions(peptidePositions, linkedPositions);
    }
}

/**
 * <p>processMismatchProteinPositions.</p>
 *
 * Forwards a protein/position count mismatch to the listener, if any.
 *
 * @param rangePositions a {@link java.util.List} object.
 * @param proteins a {@link java.util.List} object.
 */
protected void processMismatchProteinPositions(List<CsvRange> rangePositions, List<CsvProtein> proteins){
    if (this.parserListener != null){
        this.parserListener.onMismatchBetweenRangePositionsAndProteins(rangePositions, proteins);
    }
}

/**
 * <p>processInvalidPosition.</p>
 *
 * Forwards an unparseable position value to the listener, if any.
 *
 * @param message a {@link java.lang.String} object.
 * @param context a {@link psidev.psi.mi.jami.datasource.FileSourceContext} object.
 */
protected void processInvalidPosition(String message, FileSourceContext context){
    if (this.parserListener != null){
        this.parserListener.onInvalidPosition(message, context);
    }
}

/**
 * <p>processProteinIdentifiersError.</p>
 *
 * Forwards a malformed protein identifier token to the listener, if any.
 *
 * @param identifiers an array of {@link java.lang.String} objects.
 * @param lineNumber a int.
 * @param columnNumber a int.
 */
protected void processProteinIdentifiersError(String[] identifiers, int lineNumber, int columnNumber){
    if (this.parserListener != null){
        this.parserListener.onInvalidProteinIdentifierSyntax(identifiers, lineNumber, columnNumber);
    }
}

/**
 * <p>processNoProtein1Error.</p>
 *
 * Forwards a missing protein1 column to the listener, if any.
 *
 * @param lineNumber a int.
 */
protected void processNoProtein1Error(int lineNumber){
    if (this.parserListener != null){
        this.parserListener.onMissingProtein1Column(lineNumber);
    }
}

/**
 * <p>initialiseColumnNames.</p>
 *
 * Reads the header row and records, for each column index, which recognised
 * column (if any) it contains; unrecognised headers are ignored.
 *
 * @param data a {@link java.util.List} object.
 */
public void initialiseColumnNames(List<String> data){
    currentLineIndex++;
    columnsIndex = new HashMap<Integer, CrossLinkCSVColumns>(CrossLinkCSVColumns.values().length);
    int index = 0;
    for (String name : data){
        CrossLinkCSVColumns colName = CrossLinkCSVColumns.convertFromString(name.trim());
        if (colName != null){
            columnsIndex.put(index, colName);
        }
        index++;
    }
}
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.cluster; import java.io.Externalizable; import java.io.File; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.util.Collection; import java.util.Map; import java.util.UUID; import java.util.concurrent.ConcurrentMap; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteCluster; import org.apache.ignite.IgniteException; import org.apache.ignite.cluster.BaselineNode; import org.apache.ignite.cluster.ClusterGroup; import org.apache.ignite.cluster.ClusterMetrics; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.cluster.ClusterStartNodeResult; import org.apache.ignite.internal.AsyncSupportAdapter; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgnitePredicate; import org.jetbrains.annotations.Nullable; /** * */ public class IgniteClusterAsyncImpl extends AsyncSupportAdapter<IgniteCluster> implements IgniteCluster, Externalizable { /** */ private static final long serialVersionUID = 0L; /** */ private IgniteClusterImpl cluster; /** * Required by {@link 
Externalizable}. */
    public IgniteClusterAsyncImpl() {
        // No-op.
    }

    /**
     * @param cluster Cluster this async facade delegates to.
     */
    public IgniteClusterAsyncImpl(IgniteClusterImpl cluster) {
        // 'true' marks this adapter instance as asynchronous.
        super(true);

        this.cluster = cluster;
    }

    /** {@inheritDoc} */
    @Override public ClusterNode localNode() {
        return cluster.localNode();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forLocal() {
        return cluster.forLocal();
    }

    /** {@inheritDoc} */
    @Override public <K, V> ConcurrentMap<K, V> nodeLocalMap() {
        return cluster.nodeLocalMap();
    }

    /** {@inheritDoc} */
    @Override public boolean pingNode(UUID nodeId) {
        return cluster.pingNode(nodeId);
    }

    /** {@inheritDoc} */
    @Override public long topologyVersion() {
        return cluster.topologyVersion();
    }

    /** {@inheritDoc} */
    @Nullable @Override public Collection<ClusterNode> topology(long topVer) {
        return cluster.topology(topVer);
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterStartNodeResult> startNodes(File file, boolean restart, int timeout, int maxConn) {
        try {
            // Synchronous-API entry point: saveOrGet() either stores the future (async mode)
            // or blocks for the result, converting checked exceptions at the boundary.
            return saveOrGet(cluster.startNodesAsync0(file, restart, timeout, maxConn));
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }

    /** {@inheritDoc} */
    @Override public IgniteFuture<Collection<ClusterStartNodeResult>> startNodesAsync(File file, boolean restart,
        int timeout, int maxConn) throws IgniteException {
        return cluster.startNodesAsync(file, restart, timeout, maxConn);
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterStartNodeResult> startNodes(
        Collection<Map<String, Object>> hosts,
        @Nullable Map<String, Object> dflts,
        boolean restart,
        int timeout,
        int maxConn) {
        try {
            return saveOrGet(cluster.startNodesAsync0(hosts, dflts, restart, timeout, maxConn));
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }

    /** {@inheritDoc} */
    @Override public IgniteFuture<Collection<ClusterStartNodeResult>> startNodesAsync(
        Collection<Map<String, Object>> hosts,
        @Nullable Map<String, Object> dflts,
        boolean restart,
        int timeout,
        int maxConn) throws IgniteException {
        return cluster.startNodesAsync(hosts, dflts, restart, timeout, maxConn);
    }

    /** {@inheritDoc} */
    @Override public void stopNodes() {
        cluster.stopNodes();
    }

    /** {@inheritDoc} */
    @Override public void stopNodes(Collection<UUID> ids) {
        cluster.stopNodes(ids);
    }

    /** {@inheritDoc} */
    @Override public void restartNodes() {
        cluster.restartNodes();
    }

    /** {@inheritDoc} */
    @Override public void restartNodes(Collection<UUID> ids) {
        cluster.restartNodes(ids);
    }

    /** {@inheritDoc} */
    @Override public void resetMetrics() {
        cluster.resetMetrics();
    }

    /** {@inheritDoc} */
    @Override public void enableStatistics(Collection<String> caches, boolean enabled) {
        cluster.enableStatistics(caches, enabled);
    }

    /** {@inheritDoc} */
    @Override public void clearStatistics(Collection<String> caches) {
        cluster.clearStatistics(caches);
    }

    /** {@inheritDoc} */
    @Override public void setTxTimeoutOnPartitionMapExchange(long timeout) {
        cluster.setTxTimeoutOnPartitionMapExchange(timeout);
    }

    /** {@inheritDoc} */
    @Override public Ignite ignite() {
        return cluster.ignite();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forNodes(Collection<? extends ClusterNode> nodes) {
        return cluster.forNodes(nodes);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forNode(ClusterNode node, ClusterNode... nodes) {
        return cluster.forNode(node, nodes);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forOthers(ClusterNode node, ClusterNode... nodes) {
        return cluster.forOthers(node, nodes);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forOthers(ClusterGroup prj) {
        return cluster.forOthers(prj);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forNodeIds(Collection<UUID> ids) {
        return cluster.forNodeIds(ids);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forNodeId(UUID id, UUID... ids) {
        return cluster.forNodeId(id, ids);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forPredicate(IgnitePredicate<ClusterNode> p) {
        return cluster.forPredicate(p);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forAttribute(String name, @Nullable Object val) {
        return cluster.forAttribute(name, val);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forServers() {
        return cluster.forServers();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forClients() {
        return cluster.forClients();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forCacheNodes(String cacheName) {
        return cluster.forCacheNodes(cacheName);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forDataNodes(String cacheName) {
        return cluster.forDataNodes(cacheName);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forClientNodes(String cacheName) {
        return cluster.forClientNodes(cacheName);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forRemotes() {
        return cluster.forRemotes();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forHost(ClusterNode node) {
        return cluster.forHost(node);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forHost(String host, String... hosts) {
        return cluster.forHost(host, hosts);
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forDaemons() {
        return cluster.forDaemons();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forRandom() {
        return cluster.forRandom();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forOldest() {
        return cluster.forOldest();
    }

    /** {@inheritDoc} */
    @Override public ClusterGroup forYoungest() {
        return cluster.forYoungest();
    }

    /** {@inheritDoc} */
    @Override public Collection<ClusterNode> nodes() {
        return cluster.nodes();
    }

    /** {@inheritDoc} */
    @Nullable @Override public ClusterNode node(UUID id) {
        return cluster.node(id);
    }

    /** {@inheritDoc} */
    @Override public Collection<String> hostNames() {
        return cluster.hostNames();
    }

    /** {@inheritDoc} */
    @Nullable @Override public ClusterNode node() {
        return cluster.node();
    }

    /** {@inheritDoc} */
    @Override public IgnitePredicate<ClusterNode> predicate() {
        return cluster.predicate();
    }

    /** {@inheritDoc} */
    @Override public ClusterMetrics metrics() {
        return cluster.metrics();
    }

    /** {@inheritDoc} */
    // NOTE(review): unlike every other method in this facade, the four members below do NOT
    // delegate to 'cluster' — they return hard-coded stub values / do nothing. Confirm this
    // is intentional for the async adapter and not a missing delegation.
    @Override public boolean active() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public void active(boolean active) {
    }

    /** {@inheritDoc} */
    @Nullable @Override public Collection<BaselineNode> currentBaselineTopology() {
        return null;
    }

    /** {@inheritDoc} */
    @Override public void setBaselineTopology(Collection<? extends BaselineNode> baselineTop) {
    }

    /** {@inheritDoc} */
    @Override public void setBaselineTopology(long topVer) {
    }

    /** {@inheritDoc} */
    @Nullable @Override public IgniteFuture<?> clientReconnectFuture() {
        return cluster.clientReconnectFuture();
    }

    /** {@inheritDoc} */
    @Override public boolean enableWal(String cacheName) throws IgniteException {
        return cluster.enableWal(cacheName);
    }

    /** {@inheritDoc} */
    @Override public boolean disableWal(String cacheName) throws IgniteException {
        return cluster.disableWal(cacheName);
    }

    /** {@inheritDoc} */
    @Override public boolean isWalEnabled(String cacheName) {
        return cluster.isWalEnabled(cacheName);
    }

    /** {@inheritDoc} */
    @Override public boolean isBaselineAutoAdjustEnabled() {
        return cluster.isBaselineAutoAdjustEnabled();
    }

    /** {@inheritDoc} */
    @Override public void baselineAutoAdjustEnabled(boolean baselineAutoAdjustEnabled) throws IgniteException {
        cluster.baselineAutoAdjustEnabled(baselineAutoAdjustEnabled);
    }

    /**
     * @param baselineAutoAdjustEnabled Baseline adjustment mode: {@code true} if the cluster is in
     *      auto-adjust mode, {@code false} if the baseline is controlled manually.
     * @return Future to await operation completion.
     */
    public IgniteFuture<?> baselineAutoAdjustEnabledAsync(boolean baselineAutoAdjustEnabled) {
        return cluster.baselineAutoAdjustEnabledAsync(baselineAutoAdjustEnabled);
    }

    /** {@inheritDoc} */
    @Override public long baselineAutoAdjustTimeout() {
        return cluster.baselineAutoAdjustTimeout();
    }

    /** {@inheritDoc} */
    @Override public void baselineAutoAdjustTimeout(long baselineAutoAdjustTimeout) throws IgniteException {
        cluster.baselineAutoAdjustTimeout(baselineAutoAdjustTimeout);
    }

    /**
     * @param baselineAutoAdjustTimeout Time to wait before applying the actual topology change after the last
     *      server topology change (node join/left/fail).
     * @return Future to await operation completion.
     */
    public IgniteFuture<?> baselineAutoAdjustTimeoutAsync(long baselineAutoAdjustTimeout) {
        return cluster.baselineAutoAdjustTimeoutAsync(baselineAutoAdjustTimeout);
    }

    /** {@inheritDoc} */
    @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // Serialized form consists solely of the delegate cluster reference.
        cluster = (IgniteClusterImpl)in.readObject();
    }

    /** {@inheritDoc} */
    @Override public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject(cluster);
    }
}
package com.elastisys.scale.commons.rest.auth; import static com.elastisys.scale.commons.rest.auth.IsError.error; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.notNullValue; import static org.junit.Assert.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.nio.charset.StandardCharsets; import java.util.Base64; import javax.ws.rs.client.WebTarget; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.eclipse.jetty.server.Server; import org.glassfish.jersey.servlet.ServletContainer; import org.joda.time.DateTime; import org.jose4j.jwk.RsaJsonWebKey; import org.jose4j.jwk.RsaJwkGenerator; import org.jose4j.jws.AlgorithmIdentifiers; import org.jose4j.jws.JsonWebSignature; import org.jose4j.jwt.JwtClaims; import org.jose4j.jwt.NumericDate; import org.jose4j.lang.JoseException; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.mockito.Matchers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.elastisys.scale.commons.json.types.ErrorType; import com.elastisys.scale.commons.net.host.HostUtils; import com.elastisys.scale.commons.rest.client.RestClients; import com.elastisys.scale.commons.security.jwt.AuthTokenValidator; import com.elastisys.scale.commons.server.ServletDefinition; import com.elastisys.scale.commons.server.ServletServerBuilder; import com.elastisys.scale.commons.server.SslKeyStoreType; import com.elastisys.scale.commons.util.io.Resources; import com.elastisys.scale.commons.util.time.FrozenTime; import com.elastisys.scale.commons.util.time.UtcTime; /** * Tests the {@link AuthTokenRequestFilter} by setting up a server with a dummy * JAX-RS application with some resources requiring JWT authentication * (annotated with {@link RequireJwtAuthentication}) and some not requiring * authentication, and access the server resources using a mix of authenticated * and non-authenticated client requests. 
*/
public class TestAuthTokenRequestFilter {
    /** Issuer ({@code iss} claim) used for all tokens signed in these tests. */
    private static final String TOKEN_ISSUER = "Elastisys";

    private static final Logger LOG = LoggerFactory.getLogger(TestAuthTokenRequestFilter.class);

    private static final String SERVER_KEYSTORE_PATH = Resources.getResource("security/server_keystore.p12").toString();
    private static final String SERVER_KEYSTORE_PASSWORD = "serverpassword";

    /** HTTPS port picked once for the whole test class. */
    private static int httpsPort = HostUtils.findFreePorts(1).get(0);

    /** Server under test; started lazily by each test method via startServer(). */
    private Server server;

    /** The signature key pair used to sign and verify auth tokens. */
    private RsaJsonWebKey signatureKeyPair;

    /** Object under test. */
    private static AuthTokenRequestFilter authTokenFilter;

    @Before
    public void beforeTestMethod() throws Exception {
        // each test method needs to start its own server (with its chosen validator)
        this.server = null;

        // freeze the clock so token issue/expiry times are deterministic
        FrozenTime.setFixed(UtcTime.parse("2015-01-01T12:00:00.000Z"));

        this.signatureKeyPair = RsaJwkGenerator.generateJwk(2048);
        this.signatureKeyPair.setKeyId(TOKEN_ISSUER + "-signkey");
    }

    @After
    public void afterTestMethod() throws Exception {
        // tear down the server if one was started by the test method
        if (this.server != null) {
            this.server.stop();
        }
    }

    /**
     * Verify that a request without the {@code Authorization} header with an
     * authentication token when requesting a protected resource fails.
     */
    @Test
    public void accessProtectedResourceWithoutAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        Response response = getWithoutToken("/api/protected");
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class), is(new ErrorType("failed to validate Authorization token",
                "request missing Authorization Bearer token header")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * A protected resource should be possible to access if the client supplies
     * a valid auth token in the request.
     */
    @Test
    public void accessProtectedResourceWithValidAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        DateTime expirationTime = UtcTime.now().plusMinutes(10);
        String signedToken = signToken(TOKEN_ISSUER, this.signatureKeyPair, expirationTime);
        Response response = getWithToken("/api/protected", signedToken);
        assertThat(response.getStatus(), is(Status.OK.getStatusCode()));
    }

    /**
     * Verify that a protected resource cannot be accessed when a malformed
     * authentication token is used.
     */
    @Test
    public void accessProtectedResourceWithMalformedAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        // truncated JWS compact serialization — not even a valid header.payload.signature triple
        String malformedToken = "eyJhbGciOiJIUzI1";

        // access protected resource (with authentication token)
        WebTarget resource = RestClients.httpsNoAuth().target(httpsUrl("/api/protected"));
        String authzHeader = "Bearer " + malformedToken;
        Response response = resource.request().header("Authorization", authzHeader).get();
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class),
                is(new ErrorType("failed to validate Authorization token", "malformed Authorization Bearer token")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * Verify that a protected resource cannot be accessed when an
     * authentication token signed with the wrong key (or some other party) is
     * used.
     */
    @Test
    public void accessProtectedResourceWithForgedAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        DateTime expirationTime = UtcTime.now().plusMinutes(10);
        // sign with a key pair different from the one the server validates against
        RsaJsonWebKey wrongKeyPair = RsaJwkGenerator.generateJwk(2048);
        String forgedToken = signToken(TOKEN_ISSUER, wrongKeyPair, expirationTime);
        Response response = getWithToken("/api/protected", forgedToken);
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class), is(error("failed to validate Authorization token")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * Verify that it isn't possible to access a protected resource with an auth
     * token that has been tampered with (modify the claims part of the token).
     */
    @Test
    public void accessProtectedResourceWithTamperedAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        DateTime expirationTime = UtcTime.now().plusMinutes(10);
        String legitToken = signToken(TOKEN_ISSUER, this.signatureKeyPair, expirationTime);
        // modify the claims part in an attempt to try and reuse a token but
        // issue it for a different client subject
        LOG.debug("legitimate token: {}", legitToken);
        // try to modify the signature part of the token
        // <B64-encoded header>.<B64-encoded claims>.<B64-encoded signature>
        String[] parts = legitToken.split("\\.");
        String claims = new String(Base64.getDecoder().decode(parts[1]), StandardCharsets.UTF_8);
        JwtClaims legitClaims = JwtClaims.parse(claims);
        LOG.debug("legit claims: {}", legitClaims);
        legitClaims.setSubject("malicious@elastisys.com");
        LOG.debug("tampered claims: {}", legitClaims);
        parts[1] = Base64.getEncoder().encodeToString(legitClaims.toJson().getBytes());
        String tamperedToken = String.join(".", parts);
        LOG.debug("tampered token: {}", tamperedToken);

        // the tampered claims no longer match the (untouched) signature part
        Response response = getWithToken("/api/protected", tamperedToken);
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class), is(error("failed to validate Authorization token")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * Verify that the {@link AuthTokenRequestFilter} only applies to resource
     * classes/methods annotated with {@link RequireJwtAuthentication}. For
     * other, unprotected, resources no auth token should be required.
     */
    @Test
    public void accessUnprotectedResourceWithoutAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        Response response = getWithoutToken("/api/unprotected");
        assertThat(response.getStatus(), is(Status.OK.getStatusCode()));

        // should also be possible to include whatever token in the request,
        // since the token is not validated (or even inspected).
        response = getWithToken("/api/unprotected", "bogus_token");
        assertThat(response.getStatus(), is(Status.OK.getStatusCode()));
    }

    /**
     * It should not be possible to access a protected resource with an expired
     * auth token.
     */
    @Test
    public void accessProtectedResourceWithExpiredAuthToken() throws Exception {
        startServer(new AsymmetricKeyAuthTokenValidator(this.signatureKeyPair).withExpectedIssuer(TOKEN_ISSUER));

        DateTime expirationTime = UtcTime.now().plusMinutes(10);
        String tokenWithExpiration = signToken(TOKEN_ISSUER, this.signatureKeyPair, expirationTime);
        Response response = getWithToken("/api/protected", tokenWithExpiration);
        assertThat(response.getStatus(), is(Status.OK.getStatusCode()));

        // wait for token to expire (advance the frozen test clock)
        FrozenTime.setFixed(expirationTime);
        response = getWithToken("/api/protected", tokenWithExpiration);
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class), is(error("failed to validate Authorization token")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * A failing auth token validator implementation should cause a
     * {@code 401 (Unauthorized)} response that includes the exception message
     * in the response {@link ErrorType}.
     */
    @Test
    public void failingAuthTokenValidator() throws Exception {
        AuthTokenValidator failingValidator = mock(AuthTokenValidator.class);
        when(failingValidator.validate(Matchers.anyString())).thenThrow(new RuntimeException("internal error"));
        startServer(failingValidator);

        String signedToken = signToken(TOKEN_ISSUER, this.signatureKeyPair, UtcTime.now().plusMinutes(10));
        Response response = getWithToken("/api/protected", signedToken);
        assertThat(response.getStatus(), is(Status.UNAUTHORIZED.getStatusCode()));
        assertThat(response.readEntity(ErrorType.class),
                is(new ErrorType("failed to validate Authorization token", "internal error")));
        // verify that error header is present
        assertThat(response.getHeaderString("WWW-Authenticate"), is(notNullValue()));
    }

    /**
     * Starts a test server with an {@link AuthTokenRequestFilter} that makes
     * use of the given {@link AuthTokenValidator} used to protect a
     * {@link SecuredApplication}.
     *
     * @param authTokenValidator
     * @throws Exception
     */
    private void startServer(AuthTokenValidator authTokenValidator) throws Exception {
        authTokenFilter = new AuthTokenRequestFilter(authTokenValidator);
        ServletContainer appServlet = new ServletContainer(new SecuredApplication(authTokenFilter));

        ServletDefinition apiServlet = new ServletDefinition.Builder().servlet(appServlet).servletPath("/api")
                .requireHttps(true).requireBasicAuth(false).build();
        this.server = ServletServerBuilder.create().httpsPort(httpsPort).sslKeyStoreType(SslKeyStoreType.PKCS12)
                .sslKeyStorePath(SERVER_KEYSTORE_PATH).sslKeyStorePassword(SERVER_KEYSTORE_PASSWORD)
                .sslRequireClientCert(false).addServlet(apiServlet).build();
        this.server.start();
    }

    /**
     * Signs an JWT authentication token, acting as simulated authentication
     * endpoint that issues auth tokens.
     *
     * @param tokenIssuer
     *            The {@code iss} claim value.
     * @param signatureKeyPair
     *            RSA key pair whose private key signs the token.
     * @param expirationTime
     *            Absolute expiration time to set as the {@code exp} claim. Can
     *            be <code>null</code>, in which case the claim is left out.
     * @return The JWS compact serialization of the signed token.
     * @throws JoseException
     */
    private String signToken(String tokenIssuer, RsaJsonWebKey signatureKeyPair, DateTime expirationTime)
            throws JoseException {
        // Create the Claims, which will be the content of the JWT
        JwtClaims claims = new JwtClaims();
        claims.setIssuer(tokenIssuer);
        if (expirationTime != null) {
            claims.setExpirationTime(NumericDate.fromMilliseconds(expirationTime.getMillis()));
        }
        claims.setGeneratedJwtId();
        NumericDate now = NumericDate.fromMilliseconds(UtcTime.now().getMillis());
        claims.setIssuedAt(now);
        // the subject/principal is whom the token is about
        claims.setSubject("client@elastisys.com");
        // additional claims
        claims.setClaim("role", "user");

        JsonWebSignature jws = new JsonWebSignature();
        jws.setPayload(claims.toJson());
        jws.setKey(signatureKeyPair.getPrivateKey());
        jws.setKeyIdHeaderValue(signatureKeyPair.getKeyId());
        jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.RSA_USING_SHA256);
        return jws.getCompactSerialization();
    }

    /**
     * Performs a {@code GET} for a given resource without supplying a
     * {@code Authorization: Bearer <token>} header.
     *
     * @param resourcePath
     * @return
     */
    private Response getWithoutToken(String resourcePath) {
        WebTarget resource = RestClients.httpsNoAuth().target(httpsUrl(resourcePath));
        return resource.request().get();
    }

    /**
     * Performs a {@code GET} for a given resource, supplying a
     * {@code Authorization: Bearer <token>} header.
     *
     * @param resourcePath
     * @param authToken
     * @return
     */
    private Response getWithToken(String resourcePath, String authToken) {
        WebTarget resource = RestClients.httpsNoAuth().target(httpsUrl(resourcePath));
        String authzHeader = "Bearer " + authToken;
        return resource.request().header("Authorization", authzHeader).get();
    }

    private static String httpsUrl(String resourcePath) {
        String absolutePath = makeAbsolute(resourcePath);
        return String.format("https://localhost:%d%s", httpsPort, absolutePath);
    }

    private static String makeAbsolute(String resourcePath) {
        if (!resourcePath.startsWith("/")) {
            resourcePath = "/" + resourcePath;
        }
        return resourcePath;
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.airavata.gfac.bes.provider.impl; import java.util.Calendar; import java.util.Map; import org.apache.airavata.registry.cpi.AppCatalogException; import org.apache.airavata.common.exception.ApplicationSettingsException; import org.apache.airavata.gfac.core.GFacException; import org.apache.airavata.gfac.bes.security.UNICORESecurityContext; import org.apache.airavata.gfac.bes.security.X509SecurityContext; import org.apache.airavata.gfac.bes.utils.BESConstants; import org.apache.airavata.gfac.bes.utils.DataTransferrer; import org.apache.airavata.gfac.bes.utils.JSDLGenerator; import org.apache.airavata.gfac.bes.utils.SecurityUtils; import org.apache.airavata.gfac.bes.utils.StorageCreator; import org.apache.airavata.gfac.core.context.JobExecutionContext; import org.apache.airavata.gfac.core.provider.AbstractProvider; import org.apache.airavata.gfac.core.provider.GFacProvider; import org.apache.airavata.gfac.core.provider.GFacProviderException; import org.apache.airavata.gfac.core.GFacUtils; import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionInterface; import org.apache.airavata.model.appcatalog.computeresource.JobSubmissionProtocol; import 
org.apache.airavata.model.appcatalog.computeresource.UnicoreJobSubmission;
import org.apache.airavata.model.messaging.event.JobIdentifier;
import org.apache.airavata.model.messaging.event.JobStatusChangeRequestEvent;
import org.apache.airavata.model.experiment.JobDetails;
import org.apache.airavata.model.experiment.JobState;
import org.apache.xmlbeans.XmlCursor;
import org.bouncycastle.asn1.x500.style.BCStyle;
import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration;
import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStateEnumeration.Enum;
import org.ggf.schemas.bes.x2006.x08.besFactory.ActivityStatusType;
import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityDocument;
import org.ggf.schemas.bes.x2006.x08.besFactory.CreateActivityResponseDocument;
import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesDocument;
import org.ggf.schemas.bes.x2006.x08.besFactory.GetActivityStatusesResponseDocument;
import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionDocument;
import org.ggf.schemas.jsdl.x2005.x11.jsdl.JobDefinitionType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3.x2005.x08.addressing.EndpointReferenceType;

import de.fzj.unicore.bes.client.ActivityClient;
import de.fzj.unicore.bes.client.FactoryClient;
import de.fzj.unicore.bes.faults.UnknownActivityIdentifierFault;
import de.fzj.unicore.uas.client.StorageClient;
import de.fzj.unicore.wsrflite.xmlbeans.WSUtilities;
import eu.emi.security.authn.x509.impl.X500NameUtils;
import eu.unicore.util.httpclient.DefaultClientConfiguration;

/**
 * GFac provider that submits and monitors jobs on a UNICORE BES
 * (OGSA Basic Execution Service) endpoint.
 */
public class BESProvider extends AbstractProvider implements GFacProvider,
        BESConstants {
    protected final Logger log = LoggerFactory.getLogger(this.getClass());

    /** Client security configuration (credentials, trust) used for all UNICORE calls. */
    private DefaultClientConfiguration secProperties;

    /** Id of the job submitted by {@link #execute(JobExecutionContext)}. */
    private String jobId;

    /**
     * Initializes the security configuration for talking to the UNICORE endpoint.
     * Reuses a pre-built configuration from the execution context when present;
     * otherwise derives one from the experiment's X509 security context.
     */
    public void initialize(JobExecutionContext jobExecutionContext)
            throws GFacProviderException, GFacException {
        log.info("Initializing UNICORE Provider..");
        super.initialize(jobExecutionContext);
        secProperties = (DefaultClientConfiguration) jobExecutionContext.getProperty(PROP_CLIENT_CONF);
        if (secProperties != null) {
            // clone so per-job mutations don't leak into the shared configuration
            secProperties = secProperties.clone();
            return;
        }

        SecurityUtils.addSecurityContext(jobExecutionContext);
        UNICORESecurityContext unicoreContext =
                (UNICORESecurityContext) jobExecutionContext.getSecurityContext(X509SecurityContext.X509_SECURITY_CONTEXT);
        try {
            if (jobExecutionContext.getExperiment().getUserConfigurationData().isGenerateCert()) {
                secProperties = unicoreContext.getDefaultConfiguration(false,
                        jobExecutionContext.getExperiment().getUserConfigurationData());
            }
            else {
                secProperties = unicoreContext.getDefaultConfiguration(false);
            }
        } catch (ApplicationSettingsException e) {
            throw new GFacProviderException("Error initializing security of Unicore provider", e);
        }

        if (log.isDebugEnabled()) {
            log.debug("Security properties initialized.");
        }
    }

    /**
     * Submits the job described by the execution context to the UNICORE BES factory,
     * stages input files in, waits for completion and stages output files out.
     *
     * @throws GFacProviderException on submission, staging or monitoring failure.
     */
    public void execute(JobExecutionContext jobExecutionContext)
            throws GFacProviderException, GFacException {
        StorageClient sc = null;
        try {
            JobSubmissionInterface preferredJobSubmissionInterface =
                    jobExecutionContext.getPreferredJobSubmissionInterface();
            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
            String factoryUrl = null;
            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
            }
            EndpointReferenceType eprt = EndpointReferenceType.Factory.newInstance();
            eprt.addNewAddress().setStringValue(factoryUrl);
            String userDN = getUserName(jobExecutionContext);

            // TODO: to be removed
            if (userDN == null || userDN.equalsIgnoreCase("admin")) {
                userDN = "CN=zdv575, O=Ultrascan Gateway, C=DE";
            }

            CreateActivityDocument cad = CreateActivityDocument.Factory.newInstance();

            // create storage for input/output staging
            StorageCreator storageCreator = new StorageCreator(secProperties, factoryUrl, 5, null);
            sc = storageCreator.createStorage();

            JobDefinitionType jobDefinition =
                    JSDLGenerator.buildJSDLInstance(jobExecutionContext, sc.getUrl()).getJobDefinition();
            cad.addNewCreateActivity().addNewActivityDocument().setJobDefinition(jobDefinition);
            // Log the actual job definition (previously an empty, never-populated
            // JobDefinitionDocument was logged here by mistake).
            log.info("JSDL" + jobDefinition.toString());

            // upload files if any
            DataTransferrer dt = new DataTransferrer(jobExecutionContext, sc);
            dt.uploadLocalFiles();

            JobDetails jobDetails = new JobDetails();

            FactoryClient factory = new FactoryClient(eprt, secProperties);

            log.info(String.format("Activity Submitting to %s ... \n", factoryUrl));
            CreateActivityResponseDocument response = factory.createActivity(cad);
            log.info(String.format("Activity Submitted to %s \n", factoryUrl));

            EndpointReferenceType activityEpr = response.getCreateActivityResponse().getActivityIdentifier();

            log.info("Activity : " + activityEpr.getAddress().getStringValue() + " Submitted.");

            // factory.waitWhileActivityIsDone(activityEpr, 1000);
            jobId = WSUtilities.extractResourceID(activityEpr);
            if (jobId == null) {
                // fall back to a timestamp-based id when the EPR carries no resource id
                jobId = Long.toString(Calendar.getInstance().getTimeInMillis());
            }
            log.info("JobID: " + jobId);
            jobDetails.setJobID(jobId);
            jobDetails.setJobDescription(activityEpr.toString());

            jobExecutionContext.setJobDetails(jobDetails);
            GFacUtils.saveJobStatus(jobExecutionContext, jobDetails, JobState.SUBMITTED);

            log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(),
                    factory.getActivityStatus(activityEpr).toString()));

            waitUntilDone(eprt, activityEpr, jobDetails, secProperties);

            ActivityStatusType activityStatus = null;
            activityStatus = getStatus(factory, activityEpr);
            log.info(formatStatusMessage(activityEpr.getAddress().getStringValue(),
                    activityStatus.getState().toString()));
            ActivityClient activityClient;
            activityClient = new ActivityClient(activityEpr, secProperties);

            // point the transferrer at the activity's working-space storage
            dt.setStorageClient(activityClient.getUspaceClient());

            if ((activityStatus.getState() == ActivityStateEnumeration.FAILED)) {
                String error = activityStatus.getFault().getFaultcode().getLocalPart() + "\n"
                        + activityStatus.getFault().getFaultstring() + "\n EXITCODE: "
                        + activityStatus.getExitCode();
                log.info(error);

                JobState applicationJobStatus = JobState.FAILED;
                sendNotification(jobExecutionContext, applicationJobStatus);
                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails, applicationJobStatus);
                sleepQuietly(5000);

                // What if job is failed before execution and there are not stdouts generated yet?
                log.debug("Downloading any standard output and error files, if they were produced.");
                dt.downloadStdOuts();
            }
            else if (activityStatus.getState() == ActivityStateEnumeration.CANCELLED) {
                JobState applicationJobStatus = JobState.CANCELED;
                sendNotification(jobExecutionContext, applicationJobStatus);
                GFacUtils.updateJobStatus(jobExecutionContext, jobDetails, applicationJobStatus);
                throw new GFacProviderException(jobExecutionContext.getExperimentID() + "Job Canceled");
            }
            else if (activityStatus.getState() == ActivityStateEnumeration.FINISHED) {
                sleepQuietly(5000);
                JobState applicationJobStatus = JobState.COMPLETE;
                sendNotification(jobExecutionContext, applicationJobStatus);

                // a non-zero exit code means only stdout/stderr are worth fetching
                if (activityStatus.getExitCode() == 0) {
                    dt.downloadRemoteFiles();
                }
                else {
                    dt.downloadStdOuts();
                }
            }
        } catch (AppCatalogException e) {
            log.error("Error while retrieving UNICORE job submission..");
            throw new GFacProviderException("Error while retrieving UNICORE job submission..", e);
        } catch (Exception e) {
            log.error("Cannot create storage..");
            throw new GFacProviderException("Cannot create storage..", e);
        } finally {
            // destroy sms instance
            try {
                if (sc != null) {
                    sc.destroy();
                }
            } catch (Exception e) {
                log.warn("Cannot destroy temporary SMS instance:" + sc.getUrl(), e);
            }
        }
    }

    /**
     * Sleeps for the given number of milliseconds, restoring the thread's
     * interrupt flag (instead of silently swallowing the interruption).
     */
    private static void sleepQuietly(long millis) {
        try {
            Thread.sleep(millis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Maps a BES activity status (including the optional HPC-profile sub-state
     * child element) to the corresponding Airavata {@link JobState}.
     */
    private JobState getApplicationJobStatus(ActivityStatusType activityStatus) {
        if (activityStatus == null) {
            return JobState.UNKNOWN;
        }
        Enum state = activityStatus.getState();
        String status = null;
        XmlCursor acursor = activityStatus.newCursor();
        try {
            // the first child element (if any) in the HPC-profile namespace carries
            // a more fine-grained sub-state than the top-level BES state
            if (acursor.toFirstChild()) {
                if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
                    status = acursor.getName().getLocalPart();
                }
            }
            if (status != null) {
                if (status.equalsIgnoreCase("Queued") || status.equalsIgnoreCase("Starting")
                        || status.equalsIgnoreCase("Ready")) {
                    return JobState.QUEUED;
                }
                else if (status.equalsIgnoreCase("Staging-In")) {
                    return JobState.SUBMITTED;
                }
                else if (status.equalsIgnoreCase("FINISHED")) {
                    return JobState.COMPLETE;
                }
                else if (status.equalsIgnoreCase("Staging-Out")) {
                    return JobState.ACTIVE;
                }
                else if (status.equalsIgnoreCase("Executing")) {
                    return JobState.ACTIVE;
                }
                else if (status.equalsIgnoreCase("FAILED")) {
                    return JobState.FAILED;
                }
                else if (status.equalsIgnoreCase("CANCELLED")) {
                    return JobState.CANCELED;
                }
            }
            else {
                // no sub-state: fall back to the coarse BES activity state
                if (ActivityStateEnumeration.CANCELLED.equals(state)) {
                    return JobState.CANCELED;
                }
                else if (ActivityStateEnumeration.FAILED.equals(state)) {
                    return JobState.FAILED;
                }
                else if (ActivityStateEnumeration.FINISHED.equals(state)) {
                    return JobState.COMPLETE;
                }
                else if (ActivityStateEnumeration.RUNNING.equals(state)) {
                    return JobState.ACTIVE;
                }
            }
        } finally {
            if (acursor != null)
                acursor.dispose();
        }
        return JobState.UNKNOWN;
    }

    /**
     * Cancels the running activity. The activity EndpointReference was saved as
     * the job description at submission time to make cancel work.
     *
     * @param jobExecutionContext
     * @return <code>true</code> on successful termination.
     * @throws GFacProviderException on any failure to terminate the activity.
     */
    public boolean cancelJob(JobExecutionContext jobExecutionContext) throws GFacProviderException {
        try {
            String activityEpr = jobExecutionContext.getJobDetails().getJobDescription();
            // initSecurityProperties(jobExecutionContext);
            EndpointReferenceType eprt = EndpointReferenceType.Factory.parse(activityEpr);
            JobSubmissionInterface preferredJobSubmissionInterface =
                    jobExecutionContext.getPreferredJobSubmissionInterface();
            JobSubmissionProtocol protocol = preferredJobSubmissionInterface.getJobSubmissionProtocol();
            String interfaceId = preferredJobSubmissionInterface.getJobSubmissionInterfaceId();
            String factoryUrl = null;
            if (protocol.equals(JobSubmissionProtocol.UNICORE)) {
                UnicoreJobSubmission unicoreJobSubmission = GFacUtils.getUnicoreJobSubmission(interfaceId);
                factoryUrl = unicoreJobSubmission.getUnicoreEndPointURL();
            }
            EndpointReferenceType epr = EndpointReferenceType.Factory.newInstance();
            epr.addNewAddress().setStringValue(factoryUrl);

            FactoryClient factory = new FactoryClient(epr, secProperties);
            factory.terminateActivity(eprt);
            return true;
        } catch (Exception e) {
            throw new GFacProviderException(e.getLocalizedMessage(), e);
        }
    }

    // FIXME: Get user details
    private String getUserName(JobExecutionContext context) {
        // if (context.getConfigurationData()!= null) {
        // return
        // context.getConfigurationData().getBasicMetadata().getUserName();
        // } else {
        return "";
        // }
    }

    /**
     * Fetches the current status of a single activity from the BES factory.
     */
    protected ActivityStatusType getStatus(FactoryClient fc, EndpointReferenceType activityEpr)
            throws UnknownActivityIdentifierFault {
        GetActivityStatusesDocument stats = GetActivityStatusesDocument.Factory.newInstance();
        stats.addNewGetActivityStatuses().setActivityIdentifierArray(
                new EndpointReferenceType[] { activityEpr });
        GetActivityStatusesResponseDocument resDoc = fc.getActivityStatuses(stats);
        ActivityStatusType activityStatus =
                resDoc.getGetActivityStatusesResponse().getResponseArray()[0].getActivityStatus();
        return activityStatus;
    }

    protected String formatStatusMessage(String activityUrl, String status) {
        return String.format("Activity %s is %s.\n", activityUrl, status);
    }

    /**
     * Renders the BES state plus any HPC-profile sub-states as a colon-separated string.
     */
    protected String subStatusAsString(ActivityStatusType statusType) {
        StringBuffer sb = new StringBuffer();
        sb.append(statusType.getState().toString());
        XmlCursor acursor = statusType.newCursor();
        if (acursor.toFirstChild()) {
            do {
                if (acursor.getName().getNamespaceURI().equals("http://schemas.ogf.org/hpcp/2007/01/fs")) {
                    sb.append(":");
                    sb.append(acursor.getName().getLocalPart());
                }
            } while (acursor.toNextSibling());
            acursor.dispose();
            return sb.toString();
        }
        else {
            acursor.dispose();
            return sb.toString();
        }
    }

    /** Extracts the first CN attribute value from an X.500 distinguished name. */
    private String getCNFromUserDN(String userDN) {
        return X500NameUtils.getAttributeValues(userDN, BCStyle.CN)[0];
    }

    @Override
    public void initProperties(Map<String, String> properties) throws GFacProviderException, GFacException {
        // TODO Auto-generated method stub
    }

    @Override
    public void dispose(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
        secProperties = null;
    }

    @Override
    public void recover(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
        // TODO: Auto generated method body.
    }

    @Override
    public void monitor(JobExecutionContext jobExecutionContext) throws GFacProviderException, GFacException {
        // TODO: Auto generated method body.
    }

    /**
     * Polls the BES factory until the activity reaches a terminal state
     * (FINISHED/FAILED/CANCELLED) or the mapped application status is COMPLETE,
     * publishing a status-change notification on every poll.
     */
    protected void waitUntilDone(EndpointReferenceType factoryEpr, EndpointReferenceType activityEpr,
            JobDetails jobDetails, DefaultClientConfiguration secProperties) throws Exception {
        try {
            FactoryClient factoryClient = new FactoryClient(factoryEpr, secProperties);
            JobState applicationJobStatus = null;

            // Fetch the activity state once per iteration (the previous version issued
            // up to three identical remote calls per loop just to evaluate the condition).
            Enum activityState = factoryClient.getActivityStatus(activityEpr);
            while (activityState != ActivityStateEnumeration.FINISHED
                    && activityState != ActivityStateEnumeration.FAILED
                    && activityState != ActivityStateEnumeration.CANCELLED
                    && applicationJobStatus != JobState.COMPLETE) {
                ActivityStatusType activityStatus = getStatus(factoryClient, activityEpr);
                applicationJobStatus = getApplicationJobStatus(activityStatus);
                sendNotification(jobExecutionContext, applicationJobStatus);
                // GFacUtils.updateApplicationJobStatus(jobExecutionContext,jobId,
                // applicationJobStatus);
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    // stop waiting and restore the interrupt flag for the caller
                    Thread.currentThread().interrupt();
                    break;
                }
                activityState = factoryClient.getActivityStatus(activityEpr);
            }
        } catch (Exception e) {
            log.error("Error monitoring job status..");
            throw e;
        }
    }

    /**
     * Publishes a job status change request event for the current job.
     */
    private void sendNotification(JobExecutionContext jobExecutionContext, JobState status) {
        JobStatusChangeRequestEvent jobStatus = new JobStatusChangeRequestEvent();
        JobIdentifier jobIdentity = new JobIdentifier(
                jobExecutionContext.getJobDetails().getJobID(),
                jobExecutionContext.getTaskData().getTaskID(),
                jobExecutionContext.getWorkflowNodeDetails().getNodeInstanceId(),
                jobExecutionContext.getExperimentID(),
                jobExecutionContext.getGatewayID());
        jobStatus.setJobIdentity(jobIdentity);
        jobStatus.setState(status);
        // Fixed SLF4J usage: the first argument is the format string. Previously the job id
        // was passed there, so the '{}' placeholders never matched the arguments.
        log.debug("Published job status change request for job {}, experiment {}, task {}",
                jobStatus.getJobIdentity().getJobId(),
                jobStatus.getJobIdentity().getExperimentId(),
                jobStatus.getJobIdentity().getTaskId());
        jobExecutionContext.getLocalEventPublisher().publish(jobStatus);
    }
}
/*
 * Copyright (C) 2007-2008 Esmertec AG. Copyright (C) 2007-2008 The Android Open
 * Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package info.guardianproject.otr.app.im.app;

import info.guardianproject.otr.IOtrChatSession;
import info.guardianproject.otr.app.im.IChatSession;
import info.guardianproject.otr.app.im.R;
import info.guardianproject.otr.app.im.provider.Imps;
import info.guardianproject.otr.app.im.ui.RoundedAvatarDrawable;
import net.java.otr4j.session.SessionStatus;
import android.app.Activity;
import android.content.Context;
import android.database.Cursor;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.TextUtils;
import android.text.style.UnderlineSpan;
import android.util.AttributeSet;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;

/**
 * List-item view for a single contact: shows nickname (optionally with a
 * search term underlined), presence icon, avatar, last message or status
 * text, and an OTR encryption indicator. Bound from a cursor over the
 * columns in {@link #CONTACT_PROJECTION}.
 */
public class ContactView extends FrameLayout {

    // Full projection, including avatar data.
    static final String[] CONTACT_PROJECTION = { Imps.Contacts._ID, Imps.Contacts.PROVIDER,
                                                Imps.Contacts.ACCOUNT, Imps.Contacts.USERNAME,
                                                Imps.Contacts.NICKNAME, Imps.Contacts.TYPE,
                                                Imps.Contacts.SUBSCRIPTION_TYPE,
                                                Imps.Contacts.SUBSCRIPTION_STATUS,
                                                Imps.Presence.PRESENCE_STATUS,
                                                Imps.Presence.PRESENCE_CUSTOM_STATUS,
                                                Imps.Chats.LAST_MESSAGE_DATE,
                                                Imps.Chats.LAST_UNREAD_MESSAGE,
                                                Imps.Contacts.AVATAR_DATA
    };

    // Same projection without the (expensive) avatar column.
    static final String[] CONTACT_PROJECTION_LIGHT = { Imps.Contacts._ID, Imps.Contacts.PROVIDER,
                                                 Imps.Contacts.ACCOUNT, Imps.Contacts.USERNAME,
                                                 Imps.Contacts.NICKNAME, Imps.Contacts.TYPE,
                                                 Imps.Contacts.SUBSCRIPTION_TYPE,
                                                 Imps.Contacts.SUBSCRIPTION_STATUS,
                                                 Imps.Presence.PRESENCE_STATUS,
                                                 Imps.Presence.PRESENCE_CUSTOM_STATUS,
                                                 Imps.Chats.LAST_MESSAGE_DATE,
                                                 Imps.Chats.LAST_UNREAD_MESSAGE
    };

    // Column indices matching CONTACT_PROJECTION above (keep in sync).
    static final int COLUMN_CONTACT_ID = 0;
    static final int COLUMN_CONTACT_PROVIDER = 1;
    static final int COLUMN_CONTACT_ACCOUNT = 2;
    static final int COLUMN_CONTACT_USERNAME = 3;
    static final int COLUMN_CONTACT_NICKNAME = 4;
    static final int COLUMN_CONTACT_TYPE = 5;
    static final int COLUMN_SUBSCRIPTION_TYPE = 6;
    static final int COLUMN_SUBSCRIPTION_STATUS = 7;
    static final int COLUMN_CONTACT_PRESENCE_STATUS = 8;
    static final int COLUMN_CONTACT_CUSTOM_STATUS = 9;
    static final int COLUMN_LAST_MESSAGE_DATE = 10;
    static final int COLUMN_LAST_MESSAGE = 11;
    static final int COLUMN_AVATAR_DATA = 12;

    private ImApp app = null;

    // Shared, lazily-initialized drawables cached across all ContactView instances.
    private static Drawable BG_DARK;
    private static Drawable BG_LIGHT;

    static Drawable AVATAR_DEFAULT = null;
    static Drawable AVATAR_DEFAULT_GROUP = null;

    public ContactView(Context context, AttributeSet attrs) {
        super(context, attrs);

        app = ((ImApp)((Activity) getContext()).getApplication());

        if (BG_DARK == null) {
            BG_DARK = getResources().getDrawable(R.drawable.message_view_rounded_dark);
            BG_LIGHT = getResources().getDrawable(R.drawable.message_view_rounded_light);
        }
    }

    /** View-holder pattern: child views cached by the adapter via setTag(). */
    static class ViewHolder {
        TextView mLine1;
        TextView mLine2;
        ImageView mAvatar;
        ImageView mStatusIcon;
        ImageView mEncryptionIcon;
        View mContainer;
    }

    /** Convenience overload; shows the last chat message (showChatMsg = true). */
    public void bind(Cursor cursor, String underLineText, boolean scrolling) {
        bind(cursor, underLineText, true, scrolling);
    }

    /**
     * Binds this view to the contact at the cursor's current position.
     *
     * @param cursor        positioned on a row shaped like CONTACT_PROJECTION
     * @param underLineText search term to underline inside the nickname (may be empty)
     * @param showChatMsg   when true and a last message exists, show it on line 2;
     *                      otherwise line 2 shows the custom/presence status text
     * @param scrolling     unused here; passed by the adapter
     */
    public void bind(Cursor cursor, String underLineText, boolean showChatMsg, boolean scrolling) {

        ViewHolder holder = (ViewHolder) getTag();

        if (holder.mContainer != null)
            if (app.isThemeDark()) {
                holder.mContainer.setBackgroundDrawable(BG_DARK);
            } else {
                holder.mContainer.setBackgroundDrawable(BG_LIGHT);
            }

        final long providerId = cursor.getLong(COLUMN_CONTACT_PROVIDER);
        final String address = cursor.getString(COLUMN_CONTACT_USERNAME);
        final String displayName = cursor.getString(COLUMN_CONTACT_NICKNAME);
        final int type = cursor.getInt(COLUMN_CONTACT_TYPE);
        final String lastMsg = cursor.getString(COLUMN_LAST_MESSAGE);
        final int presence = cursor.getInt(COLUMN_CONTACT_PRESENCE_STATUS);

        final int subType = cursor.getInt(COLUMN_SUBSCRIPTION_TYPE);
        final int subStatus = cursor.getInt(COLUMN_SUBSCRIPTION_STATUS);

        String statusText = cursor.getString(COLUMN_CONTACT_CUSTOM_STATUS);

        // Fall back to the raw address when no nickname is set.
        String nickname = displayName;
        if (nickname == null)
            nickname = address;

        BrandingResources brandingRes = app.getBrandingResource(providerId);

        if (!TextUtils.isEmpty(underLineText)) {
            // highlight/underline the word being searched
            String lowercase = nickname.toLowerCase();
            int start = lowercase.indexOf(underLineText.toLowerCase());
            if (start >= 0) {
                int end = start + underLineText.length();
                SpannableString str = new SpannableString(nickname);
                str.setSpan(new UnderlineSpan(), start, end, Spannable.SPAN_INCLUSIVE_INCLUSIVE);
                holder.mLine1.setText(str);
            } else
                holder.mLine1.setText(nickname);
        } else
            holder.mLine1.setText(nickname);

        if (holder.mStatusIcon != null) {
            Drawable statusIcon = brandingRes.getDrawable(PresenceUtils.getStatusIconId(presence));
            statusIcon.setBounds(0, 0, statusIcon.getIntrinsicWidth(),
                    statusIcon.getIntrinsicHeight());
            holder.mStatusIcon.setImageDrawable(statusIcon);
        }

        if (holder.mAvatar != null) {
            if (Imps.Contacts.TYPE_GROUP == type) {
                // Group chats get a shared default group avatar and no presence icon.
                holder.mAvatar.setVisibility(View.VISIBLE);
                if (AVATAR_DEFAULT_GROUP == null)
                    AVATAR_DEFAULT_GROUP = new RoundedAvatarDrawable(
                            BitmapFactory.decodeResource(getResources(), R.drawable.group_chat));
                holder.mAvatar.setImageDrawable(AVATAR_DEFAULT_GROUP);
                holder.mStatusIcon.setVisibility(View.GONE);
            } else if (cursor.getColumnIndex(Imps.Contacts.AVATAR_DATA) != -1) {
                // Cursor includes avatar data (full projection): decode or use default.
                holder.mAvatar.setVisibility(View.GONE);
                Drawable avatar = DatabaseUtils.getAvatarFromCursor(cursor, COLUMN_AVATAR_DATA,
                        ImApp.DEFAULT_AVATAR_WIDTH, ImApp.DEFAULT_AVATAR_HEIGHT);
                if (avatar != null)
                    holder.mAvatar.setImageDrawable(avatar);
                else {
                    if (AVATAR_DEFAULT == null)
                        AVATAR_DEFAULT = new RoundedAvatarDrawable(
                                BitmapFactory.decodeResource(getResources(), R.drawable.avatar_unknown));
                    holder.mAvatar.setImageDrawable(AVATAR_DEFAULT);
                }
                holder.mAvatar.setVisibility(View.VISIBLE);
            } else {
                //holder.mAvatar.setImageDrawable(getContext().getResources().getDrawable(R.drawable.avatar_unknown));
                holder.mAvatar.setVisibility(View.GONE);
            }
        }

        if (showChatMsg && lastMsg != null) {
            if (holder.mLine2 != null)
                holder.mLine2.setText(android.text.Html.fromHtml(lastMsg).toString());
        } else {
            if (holder.mLine2 != null) {
                if (statusText == null || statusText.length() == 0) {
                    if (Imps.Contacts.TYPE_GROUP == type) {
                        statusText = getContext().getString(R.string.menu_new_group_chat);
                    } else {
                        statusText = brandingRes.getString(PresenceUtils.getStatusStringRes(presence));
                    }
                }
                holder.mLine2.setText(statusText);
            }
        }

        if (subType == Imps.ContactsColumns.SUBSCRIPTION_TYPE_INVITATIONS) {
            // if (holder.mLine2 != null)
            // holder.mLine2.setText("Contact List Request");
        }

        holder.mLine1.setVisibility(View.VISIBLE);

        getEncryptionState(providerId, address, holder);
    }

    /**
     * Updates the status icon to the OTR "encrypted" variants when an active
     * chat session with this contact is encrypted (verified vs. unverified key).
     * Any failure talking to the chat service is logged and otherwise ignored.
     */
    private void getEncryptionState(long providerId, String address, ViewHolder holder) {

        try {
            IChatSession chatSession = app.getConnection(providerId).getChatSessionManager()
                    .getChatSession(address);

            if (chatSession != null) {
                IOtrChatSession otrChatSession = chatSession.getOtrChatSession();
                if (otrChatSession != null) {
                    SessionStatus chatStatus = SessionStatus.values()[otrChatSession.getChatStatus()];

                    if (chatStatus == SessionStatus.ENCRYPTED) {
                        boolean isVerified = otrChatSession.isKeyVerified(address);

                        if (isVerified)
                            holder.mStatusIcon.setImageDrawable(
                                    getResources().getDrawable(R.drawable.ic_black_encrypted_and_verified));
                        else
                            holder.mStatusIcon.setImageDrawable(
                                    getResources().getDrawable(R.drawable.ic_black_encrypted_not_verified));
                    }
                }
            }

        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        //mCurrentChatSession.getOtrChatSession();
    }

    /*
    private String queryGroupMembers(ContentResolver resolver, long groupId) {
        String[] projection = { Imps.GroupMembers.NICKNAME };
        Uri uri = ContentUris.withAppendedId(Imps.GroupMembers.CONTENT_URI, groupId);
        Cursor c = resolver.query(uri, projection, null, null, null);
        StringBuilder buf = new StringBuilder();
        if (c != null) {
            while (c.moveToNext()) {
                buf.append(c.getString(0));
                if (!c.isLast()) {
                    buf.append(',');
                }
            }
        }

        c.close();

        return buf.toString();
    }*/

}
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jbpm.bpmn2.xml; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; import org.drools.core.xml.BaseAbstractHandler; import org.drools.core.xml.ExtensibleXmlParser; import org.drools.core.xml.Handler; import org.jbpm.bpmn2.core.Association; import org.jbpm.bpmn2.core.DataStore; import org.jbpm.bpmn2.core.Definitions; import org.jbpm.bpmn2.core.Error; import org.jbpm.bpmn2.core.Escalation; import org.jbpm.bpmn2.core.Interface; import org.jbpm.bpmn2.core.IntermediateLink; import org.jbpm.bpmn2.core.ItemDefinition; import org.jbpm.bpmn2.core.Lane; import org.jbpm.bpmn2.core.Message; import org.jbpm.bpmn2.core.SequenceFlow; import org.jbpm.bpmn2.core.Signal; import org.jbpm.compiler.xml.ProcessBuildData; import org.jbpm.process.core.ContextContainer; import org.jbpm.process.core.context.exception.ActionExceptionHandler; import org.jbpm.process.core.context.exception.CompensationHandler; import org.jbpm.process.core.context.exception.CompensationScope; import org.jbpm.process.core.context.exception.ExceptionScope; import org.jbpm.process.core.context.swimlane.Swimlane; import org.jbpm.process.core.event.EventFilter; import org.jbpm.process.core.event.EventTypeFilter; import org.jbpm.process.core.timer.Timer; import 
org.jbpm.process.instance.impl.CancelNodeInstanceAction; import org.jbpm.ruleflow.core.RuleFlowProcess; import org.jbpm.ruleflow.core.validation.RuleFlowProcessValidator; import org.jbpm.workflow.core.Connection; import org.jbpm.workflow.core.Constraint; import org.jbpm.workflow.core.DroolsAction; import org.jbpm.workflow.core.impl.ConnectionImpl; import org.jbpm.workflow.core.impl.ConnectionRef; import org.jbpm.workflow.core.impl.ConstraintImpl; import org.jbpm.workflow.core.impl.DroolsConsequenceAction; import org.jbpm.workflow.core.impl.ExtendedNodeImpl; import org.jbpm.workflow.core.impl.NodeImpl; import org.jbpm.workflow.core.node.ActionNode; import org.jbpm.workflow.core.node.BoundaryEventNode; import org.jbpm.workflow.core.node.CompositeContextNode; import org.jbpm.workflow.core.node.CompositeNode; import org.jbpm.workflow.core.node.ConstraintTrigger; import org.jbpm.workflow.core.node.EndNode; import org.jbpm.workflow.core.node.EventNode; import org.jbpm.workflow.core.node.EventSubProcessNode; import org.jbpm.workflow.core.node.EventTrigger; import org.jbpm.workflow.core.node.FaultNode; import org.jbpm.workflow.core.node.HumanTaskNode; import org.jbpm.workflow.core.node.RuleSetNode; import org.jbpm.workflow.core.node.Split; import org.jbpm.workflow.core.node.StartNode; import org.jbpm.workflow.core.node.StateBasedNode; import org.jbpm.workflow.core.node.StateNode; import org.jbpm.workflow.core.node.SubProcessNode; import org.jbpm.workflow.core.node.Trigger; import org.jbpm.workflow.core.node.WorkItemNode; import org.kie.api.definition.process.Node; import org.kie.api.definition.process.NodeContainer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xml.sax.Attributes; import org.xml.sax.SAXException; public class ProcessHandler extends BaseAbstractHandler implements Handler { private static final Logger logger = LoggerFactory.getLogger(ProcessHandler.class); public static final String CONNECTIONS = "BPMN.Connections"; public static final 
String LINKS = "BPMN.ThrowLinks"; public static final String ASSOCIATIONS = "BPMN.Associations"; public static final String ERRORS = "BPMN.Errors"; public static final String ESCALATIONS = "BPMN.Escalations"; static final String PROCESS_INSTANCE_SIGNAL_EVENT = "kcontext.getProcessInstance().signalEvent(\""; static final String RUNTIME_SIGNAL_EVENT = "kcontext.getKnowledgeRuntime().signalEvent(\""; static final String RUNTIME_MANAGER_SIGNAL_EVENT = "((org.kie.api.runtime.manager.RuntimeManager)kcontext.getKnowledgeRuntime().getEnvironment().get(\"RuntimeManager\")).signalEvent(\""; @SuppressWarnings("unchecked") public ProcessHandler() { if ((this.validParents == null) && (this.validPeers == null)) { this.validParents = new HashSet(); this.validParents.add(Definitions.class); this.validPeers = new HashSet(); this.validPeers.add(null); this.validPeers.add(ItemDefinition.class); this.validPeers.add(Message.class); this.validPeers.add(Interface.class); this.validPeers.add(Escalation.class); this.validPeers.add(Error.class); this.validPeers.add(Signal.class); this.validPeers.add(DataStore.class); this.validPeers.add(RuleFlowProcess.class); this.allowNesting = false; } } public Object start(final String uri, final String localName, final Attributes attrs, final ExtensibleXmlParser parser) throws SAXException { parser.startElementBuilder(localName, attrs); String id = attrs.getValue("id"); String name = attrs.getValue("name"); String packageName = attrs.getValue("http://www.jboss.org/drools", "packageName"); String dynamic = attrs.getValue("http://www.jboss.org/drools", "adHoc"); String version = attrs.getValue("http://www.jboss.org/drools", "version"); RuleFlowProcess process = new RuleFlowProcess(); process.setAutoComplete(true); process.setId(id); if (name == null) { name = id; } process.setName(name); process.setType("RuleFlow"); if (packageName == null) { packageName = "org.drools.bpmn2"; } process.setPackageName(packageName); if ("true".equals(dynamic)) { 
process.setDynamic(true); process.setAutoComplete(false); } if (version != null) { process.setVersion(version); } ((ProcessBuildData) parser.getData()).addProcess(process); // register the definitions object as metadata of process. process.setMetaData("Definitions", parser.getParent()); // register bpmn2 imports as meta data of process Object typedImports = ((ProcessBuildData) parser.getData()).getMetaData("Bpmn2Imports"); if (typedImports != null) { process.setMetaData("Bpmn2Imports", typedImports); } // register item definitions as meta data of process Object itemDefinitions = ((ProcessBuildData) parser.getData()).getMetaData("ItemDefinitions"); if (itemDefinitions != null) { process.setMetaData("ItemDefinitions", itemDefinitions); } // for unique id's of nodes, start with one to avoid returning wrong nodes for dynamic nodes parser.getMetaData().put("idGen", new AtomicInteger(1)); return process; } @SuppressWarnings("unchecked") public Object end(final String uri, final String localName, final ExtensibleXmlParser parser) throws SAXException { parser.endElementBuilder(); RuleFlowProcess process = (RuleFlowProcess) parser.getCurrent(); List<IntermediateLink> throwLinks = (List<IntermediateLink>) process .getMetaData(LINKS); linkIntermediateLinks(process, throwLinks); List<SequenceFlow> connections = (List<SequenceFlow>) process.getMetaData(CONNECTIONS); linkConnections(process, connections); linkBoundaryEvents(process); // This must be done *after* linkConnections(process, connections) // because it adds hidden connections for compensations List<Association> associations = (List<Association>) process.getMetaData(ASSOCIATIONS); linkAssociations((Definitions) process.getMetaData("Definitions"), process, associations); List<Lane> lanes = (List<Lane>) process.getMetaData(LaneHandler.LANES); assignLanes(process, lanes); postProcessNodes(process, process); return process; } public static void linkIntermediateLinks(NodeContainer process, List<IntermediateLink> links) { if 
(null != links) { // Search throw links ArrayList<IntermediateLink> throwLinks = new ArrayList<IntermediateLink>(); for (IntermediateLink aLinks : links) { if (aLinks.isThrowLink()) { throwLinks.add(aLinks); } } // Look for catch links for a throw link for (IntermediateLink throwLink : throwLinks) { ArrayList<IntermediateLink> linksWithSharedNames = new ArrayList<IntermediateLink>(); for (IntermediateLink aLink : links) { if (throwLink.getName().equals(aLink.getName())) { linksWithSharedNames.add(aLink); } } if (linksWithSharedNames.size() < 2) { throw new IllegalArgumentException( "There should be at least 2 link events to make a connection"); } linksWithSharedNames.remove(throwLink); // Make the connections Node t = findNodeByIdOrUniqueIdInMetadata(process, throwLink.getUniqueId()); // connect throw to catch for (IntermediateLink catchLink : linksWithSharedNames) { Node c = findNodeByIdOrUniqueIdInMetadata(process, catchLink.getUniqueId()); if (t != null && c != null) { Connection result = new ConnectionImpl(t, NodeImpl.CONNECTION_DEFAULT_TYPE, c, NodeImpl.CONNECTION_DEFAULT_TYPE); result.setMetaData("linkNodeHidden", "yes"); } } // Remove processed links links.remove(throwLink); links.removeAll(linksWithSharedNames); } if (links.size() > 0) { throw new IllegalArgumentException(links.size() + " links were not processed"); } } } private static Object findNodeOrDataStoreByUniqueId(Definitions definitions, NodeContainer nodeContainer, final String nodeRef, String errorMsg) { if( definitions != null ) { List<DataStore> dataStores = definitions.getDataStores(); if( dataStores != null ) { for( DataStore dataStore : dataStores ) { if( nodeRef.equals(dataStore.getId()) ) { return dataStore; } } } } return findNodeByIdOrUniqueIdInMetadata(nodeContainer, nodeRef, errorMsg); } private static Node findNodeByIdOrUniqueIdInMetadata( NodeContainer nodeContainer, String targetRef) { return findNodeByIdOrUniqueIdInMetadata(nodeContainer, targetRef, "Could not find target node for 
connection:" + targetRef); } private static Node findNodeByIdOrUniqueIdInMetadata(NodeContainer nodeContainer, final String nodeRef, String errorMsg) { Node node = null; // try looking for a node with same "UniqueId" (in metadata) for (Node containerNode: nodeContainer.getNodes()) { if (nodeRef.equals(containerNode.getMetaData().get("UniqueId"))) { node = containerNode; break; } } if (node == null) { throw new IllegalArgumentException(errorMsg); } return node; } public Class<?> generateNodeFor() { return RuleFlowProcess.class; } public static void linkConnections(NodeContainer nodeContainer, List<SequenceFlow> connections) { if (connections != null) { for (SequenceFlow connection: connections) { String sourceRef = connection.getSourceRef(); Node source = findNodeByIdOrUniqueIdInMetadata(nodeContainer, sourceRef, "Could not find source node for connection:" + sourceRef); if (source instanceof EventNode) { for (EventFilter eventFilter : ((EventNode) source).getEventFilters()) { if (eventFilter instanceof EventTypeFilter) { if ("Compensation".equals(((EventTypeFilter) eventFilter).getType())) { // While this isn't explicitly stated in the spec, // BPMN Method & Style, 2nd Ed. (Silver), states this on P. 
131 throw new IllegalArgumentException( "A Compensation Boundary Event can only be *associated* with a compensation activity via an Association, not via a Sequence Flow element."); } } } } String targetRef = connection.getTargetRef(); Node target = findNodeByIdOrUniqueIdInMetadata(nodeContainer, targetRef, "Could not find target node for connection:" + targetRef); Connection result = new ConnectionImpl( source, NodeImpl.CONNECTION_DEFAULT_TYPE, target, NodeImpl.CONNECTION_DEFAULT_TYPE); result.setMetaData("bendpoints", connection.getBendpoints()); result.setMetaData("UniqueId", connection.getId()); if ("true".equals(System.getProperty("jbpm.enable.multi.con"))){ NodeImpl nodeImpl = (NodeImpl) source; Constraint constraint = buildConstraint(connection, nodeImpl); if (constraint != null) { nodeImpl.addConstraint(new ConnectionRef(target.getId(), NodeImpl.CONNECTION_DEFAULT_TYPE), constraint); } } else if (source instanceof Split) { Split split = (Split) source; Constraint constraint = buildConstraint(connection, split); split.addConstraint( new ConnectionRef(target.getId(), NodeImpl.CONNECTION_DEFAULT_TYPE), constraint); } } } } public static void linkBoundaryEvents(NodeContainer nodeContainer) { for (Node node: nodeContainer.getNodes()) { if (node instanceof EventNode) { final String attachedTo = (String) node.getMetaData().get("AttachedTo"); if (attachedTo != null) { for( EventFilter filter : ((EventNode) node).getEventFilters() ) { String type = ((EventTypeFilter) filter).getType(); Node attachedNode = findNodeByIdOrUniqueIdInMetadata(nodeContainer, attachedTo, "Could not find node to attach to: " + attachedTo); // if (!(attachedNode instanceof StateBasedNode) && !type.equals("Compensation")) { throw new IllegalArgumentException("Boundary events are supported only on StateBasedNode, found node: " + attachedNode.getClass().getName() + " [" + attachedNode.getMetaData().get("UniqueId") + "]"); } if (type.startsWith("Escalation")) { 
linkBoundaryEscalationEvent(nodeContainer, node, attachedTo, attachedNode); } else if (type.startsWith("Error-")) { linkBoundaryErrorEvent(nodeContainer, node, attachedTo, attachedNode); } else if (type.startsWith("Timer-")) { linkBoundaryTimerEvent(nodeContainer, node, attachedTo, attachedNode); } else if (type.equals("Compensation")) { linkBoundaryCompensationEvent(nodeContainer, node, attachedTo, attachedNode); } else if (node.getMetaData().get("SignalName") != null || type.startsWith("Message-")) { linkBoundarySignalEvent(nodeContainer, node, attachedTo, attachedNode); } else if (type.startsWith("Condition-")) { linkBoundaryConditionEvent(nodeContainer, node, attachedTo, attachedNode); } } } } } } private static void linkBoundaryEscalationEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { boolean cancelActivity = (Boolean) node.getMetaData().get("CancelActivity"); String escalationCode = (String) node.getMetaData().get("EscalationEvent"); String escalationStructureRef = (String) node.getMetaData().get("EscalationStructureRef"); ContextContainer compositeNode = (ContextContainer) attachedNode; ExceptionScope exceptionScope = (ExceptionScope) compositeNode.getDefaultContext(ExceptionScope.EXCEPTION_SCOPE); if (exceptionScope == null) { exceptionScope = new ExceptionScope(); compositeNode.addContext(exceptionScope); compositeNode.setDefaultContext(exceptionScope); } String variable = ((EventNode)node).getVariableName(); ActionExceptionHandler exceptionHandler = new ActionExceptionHandler(); DroolsConsequenceAction action = new DroolsConsequenceAction("java", PROCESS_INSTANCE_SIGNAL_EVENT + "Escalation-" + attachedTo + "-" + escalationCode + "\", kcontext.getVariable(\"" + variable +"\"));"); exceptionHandler.setAction(action); exceptionHandler.setFaultVariable(variable); exceptionScope.setExceptionHandler(escalationCode, exceptionHandler); if (escalationStructureRef != null) { 
exceptionScope.setExceptionHandler(escalationStructureRef, exceptionHandler); } if (cancelActivity) { List<DroolsAction> actions = ((EventNode)node).getActions(EndNode.EVENT_NODE_EXIT); if (actions == null) { actions = new ArrayList<DroolsAction>(); } DroolsConsequenceAction cancelAction = new DroolsConsequenceAction("java", null); cancelAction.setMetaData("Action", new CancelNodeInstanceAction(attachedTo)); actions.add(cancelAction); ((EventNode)node).setActions(EndNode.EVENT_NODE_EXIT, actions); } } private static void linkBoundaryErrorEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { ContextContainer compositeNode = (ContextContainer) attachedNode; ExceptionScope exceptionScope = (ExceptionScope) compositeNode.getDefaultContext(ExceptionScope.EXCEPTION_SCOPE); if (exceptionScope == null) { exceptionScope = new ExceptionScope(); compositeNode.addContext(exceptionScope); compositeNode.setDefaultContext(exceptionScope); } String errorCode = (String) node.getMetaData().get("ErrorEvent"); boolean hasErrorCode = (Boolean) node.getMetaData().get("HasErrorEvent"); String errorStructureRef = (String) node.getMetaData().get("ErrorStructureRef"); ActionExceptionHandler exceptionHandler = new ActionExceptionHandler(); String variable = ((EventNode)node).getVariableName(); DroolsConsequenceAction action = new DroolsConsequenceAction("java", PROCESS_INSTANCE_SIGNAL_EVENT + "Error-" + attachedTo + "-" + errorCode + "\", kcontext.getVariable(\"" + variable +"\"));"); exceptionHandler.setAction(action); exceptionHandler.setFaultVariable(variable); exceptionScope.setExceptionHandler(hasErrorCode?errorCode:null, exceptionHandler); if (errorStructureRef != null) { exceptionScope.setExceptionHandler(errorStructureRef, exceptionHandler); } List<DroolsAction> actions = ((EventNode)node).getActions(EndNode.EVENT_NODE_EXIT); if (actions == null) { actions = new ArrayList<DroolsAction>(); } DroolsConsequenceAction cancelAction = new 
DroolsConsequenceAction("java", null); cancelAction.setMetaData("Action", new CancelNodeInstanceAction(attachedTo)); actions.add(cancelAction); ((EventNode)node).setActions(EndNode.EVENT_NODE_EXIT, actions); } private static void linkBoundaryTimerEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { boolean cancelActivity = (Boolean) node.getMetaData().get("CancelActivity"); StateBasedNode compositeNode = (StateBasedNode) attachedNode; String timeDuration = (String) node.getMetaData().get("TimeDuration"); String timeCycle = (String) node.getMetaData().get("TimeCycle"); String timeDate = (String) node.getMetaData().get("TimeDate"); Timer timer = new Timer(); if (timeDuration != null) { timer.setDelay(timeDuration); timer.setTimeType(Timer.TIME_DURATION); compositeNode.addTimer(timer, new DroolsConsequenceAction("java", PROCESS_INSTANCE_SIGNAL_EVENT + "Timer-" + attachedTo + "-" + timeDuration + "-" + node.getId() +"\", kcontext.getNodeInstance().getId());")); } else if (timeCycle != null) { int index = timeCycle.indexOf("###"); if (index != -1) { String period = timeCycle.substring(index + 3); timeCycle = timeCycle.substring(0, index); timer.setPeriod(period); } timer.setDelay(timeCycle); timer.setTimeType(Timer.TIME_CYCLE); compositeNode.addTimer(timer, new DroolsConsequenceAction("java", PROCESS_INSTANCE_SIGNAL_EVENT + "Timer-" + attachedTo + "-" + timeCycle + (timer.getPeriod() == null ? 
"" : "###" + timer.getPeriod()) + "-" + node.getId() + "\", kcontext.getNodeInstance().getId());")); } else if (timeDate != null) { timer.setDate(timeDate); timer.setTimeType(Timer.TIME_DATE); compositeNode.addTimer(timer, new DroolsConsequenceAction("java", PROCESS_INSTANCE_SIGNAL_EVENT + "Timer-" + attachedTo + "-" + timeDate + "-" + node.getId() + "\", kcontext.getNodeInstance().getId());")); } if (cancelActivity) { List<DroolsAction> actions = ((EventNode)node).getActions(EndNode.EVENT_NODE_EXIT); if (actions == null) { actions = new ArrayList<DroolsAction>(); } DroolsConsequenceAction cancelAction = new DroolsConsequenceAction("java", null); cancelAction.setMetaData("Action", new CancelNodeInstanceAction(attachedTo)); actions.add(cancelAction); ((EventNode)node).setActions(EndNode.EVENT_NODE_EXIT, actions); } } private static void linkBoundaryCompensationEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { /** * BPMN2 Spec, p. 264: * "For an Intermediate event attached to the boundary of an activity:" * ... 
* The Activity the Event is attached to will provide the Id necessary * to match the Compensation Event with the Event that threw the compensation" * * In other words: "activityRef" is and should be IGNORED */ String activityRef = (String) node.getMetaData().get("ActivityRef"); if( activityRef != null ) { logger.warn("Attribute activityRef={} will be IGNORED since this is a Boundary Compensation Event.", activityRef); } // linkAssociations takes care of the rest } private static void linkBoundarySignalEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { boolean cancelActivity = (Boolean) node.getMetaData().get("CancelActivity"); if (cancelActivity) { List<DroolsAction> actions = ((EventNode)node).getActions(EndNode.EVENT_NODE_EXIT); if (actions == null) { actions = new ArrayList<DroolsAction>(); } DroolsConsequenceAction action = new DroolsConsequenceAction("java", null); action.setMetaData("Action", new CancelNodeInstanceAction(attachedTo)); actions.add(action); ((EventNode)node).setActions(EndNode.EVENT_NODE_EXIT, actions); } } private static void linkBoundaryConditionEvent(NodeContainer nodeContainer, Node node, String attachedTo, Node attachedNode) { String processId = ((RuleFlowProcess) nodeContainer).getId(); String eventType = "RuleFlowStateEvent-" + processId + "-" + ((EventNode) node).getUniqueId() + "-" + attachedTo; ((EventTypeFilter) ((EventNode) node).getEventFilters().get(0)).setType(eventType); boolean cancelActivity = (Boolean) node.getMetaData().get("CancelActivity"); if (cancelActivity) { List<DroolsAction> actions = ((EventNode)node).getActions(EndNode.EVENT_NODE_EXIT); if (actions == null) { actions = new ArrayList<DroolsAction>(); } DroolsConsequenceAction action = new DroolsConsequenceAction("java", null); action.setMetaData("Action", new CancelNodeInstanceAction(attachedTo)); actions.add(action); ((EventNode)node).setActions(EndNode.EVENT_NODE_EXIT, actions); } } public static void linkAssociations(Definitions 
            definitions, NodeContainer nodeContainer, List<Association> associations) {
        // Resolves each association's source/target and, for compensation
        // associations (source is an EventNode), validates the pair and wires a
        // hidden connection from the boundary event to its compensation activity.
        if( associations != null ) {
            for( Association association : associations ) {
                String sourceRef = association.getSourceRef();
                Object source = null;
                try {
                    // NOTE(review): message is missing the opening "[" before the
                    // association id (compare the target message below).
                    source = findNodeOrDataStoreByUniqueId(definitions, nodeContainer, sourceRef,
                            "Could not find source [" + sourceRef + "] for association " + association.getId() + "]" );
                } catch (IllegalArgumentException e) {
                    // source not found
                }
                String targetRef = association.getTargetRef();
                Object target = null;
                try {
                    target = findNodeOrDataStoreByUniqueId(definitions, nodeContainer, targetRef,
                            "Could not find target [" + targetRef + "] for association [" + association.getId() + "]" );
                } catch (IllegalArgumentException e) {
                    // target not found
                }
                if (source == null || target == null) {
                    // TODO: ignoring this association for now
                } else if (target instanceof DataStore || source instanceof DataStore) {
                    // TODO: ignoring data store associations for now
                } else if (source instanceof EventNode) {
                    EventNode sourceNode = (EventNode) source;
                    Node targetNode = (Node) target;
                    checkBoundaryEventCompensationHandler(association, sourceNode, targetNode);

                    // make sure IsForCompensation is set to true on target
                    NodeImpl targetNodeImpl = (NodeImpl) target;
                    String isForCompensation = "isForCompensation";
                    Object compensationObject = targetNodeImpl.getMetaData(isForCompensation);
                    if( compensationObject == null ) {
                        targetNodeImpl.setMetaData(isForCompensation, true);
                        logger.warn("Setting {} attribute to true for node {}", isForCompensation, targetRef );
                    } else if( ! Boolean.parseBoolean(compensationObject.toString()) ) {
                        throw new IllegalArgumentException(isForCompensation + " attribute [" + compensationObject
                                + "] should be true for Compensation Activity [" + targetRef + "]");
                    }

                    // put Compensation Handler in CompensationHandlerNode
                    NodeContainer sourceParent = sourceNode.getNodeContainer();
                    NodeContainer targetParent = targetNode.getNodeContainer();
                    if( ! sourceParent.equals(targetParent) ) {
                        throw new IllegalArgumentException("Compensation Associations may not cross (sub-)process boundaries,");
                    }

                    // connect boundary event to compensation activity
                    ConnectionImpl connection = new ConnectionImpl(sourceNode, NodeImpl.CONNECTION_DEFAULT_TYPE, targetNode, NodeImpl.CONNECTION_DEFAULT_TYPE);
                    // "hidden"/"association" mark this as a synthetic connection that
                    // should not be rendered or validated like a normal sequence flow.
                    connection.setMetaData("UniqueId", null);
                    connection.setMetaData("hidden", true );
                    connection.setMetaData("association", true );

                    // Compensation use cases:
                    // - boundary event --associated-> activity
                    // - implicit sub process compensation handler + recursive?

                    /**
                     * BPMN2 spec, p.442:
                     *   "A Compensation Event Sub-process becomes enabled when its parent Activity transitions into state
                     *   Completed. At that time, a snapshot of the data associated with the parent Acitivity is taken and kept for
                     *   later usage by the Compensation Event Sub-Process."
                     */
                }
            }
        }
    }

    /**
     * This logic belongs in {@link RuleFlowProcessValidator} -- except that {@link Association}s are a jbpm-bpmn2 class,
     * and {@link RuleFlowProcessValidator} is a jbpm-flow class..
     * </p>
     * Maybe we should have a BPMNProcessValidator class?
     *
     * @param association The association to check.
     * @param source The source of the association.
     * @param target The target of the association.
     */
    private static void checkBoundaryEventCompensationHandler(Association association, Node source, Node target) {
        // check that
        // - event node is boundary event node
        if( !
            (source instanceof BoundaryEventNode) ) {
            // NOTE(review): "(not with" is missing a trailing space before the class name.
            throw new IllegalArgumentException("(Compensation) activities may only be associated with Boundary Event Nodes (not with"
                    + source.getClass().getSimpleName() + " nodes [node " + ((String) source.getMetaData().get("UniqueId")) + "].");
        }
        BoundaryEventNode eventNode = (BoundaryEventNode) source;

        // - event node has compensationEvent
        List<EventFilter> eventFilters = eventNode.getEventFilters();
        boolean compensationCheckPassed = false;
        if( eventFilters != null) {
            for( EventFilter filter : eventFilters ) {
                if( filter instanceof EventTypeFilter ) {
                    String type = ((EventTypeFilter) filter).getType();
                    if( type != null && type.equals("Compensation") ) {
                        compensationCheckPassed = true;
                    }
                }
            }
        }
        if( ! compensationCheckPassed ) {
            throw new IllegalArgumentException("An Event [" + ((String) eventNode.getMetaData("UniqueId"))
                    + "] linked from an association [" + association.getId()
                    + "] must be a (Boundary) Compensation Event.");
        }

        // - boundary event node is attached to the correct type of node?
        /**
         * Tasks:
         * business: RuleSetNode
         * manual: WorkItemNode
         * receive: WorkItemNode
         * script: ActionNode
         * send: WorkItemNode
         * service: WorkItemNode
         * task: WorkItemNode
         * user: HumanTaskNode
         */
        String attachedToId = eventNode.getAttachedToNodeId();
        Node attachedToNode = null;
        // Resolve the attached-to node by its "UniqueId" metadata within the same container.
        for( Node node : eventNode.getNodeContainer().getNodes() ) {
            if( attachedToId.equals(node.getMetaData().get("UniqueId")) ) {
                attachedToNode = node;
                break;
            }
        }
        if( attachedToNode == null ) {
            throw new IllegalArgumentException("Boundary Event [" + ((String) eventNode.getMetaData("UniqueId"))
                    + "] is not attached to a node [" + attachedToId + "] that can be found.");
        }
        if( !( attachedToNode instanceof RuleSetNode
                || attachedToNode instanceof WorkItemNode
                || attachedToNode instanceof ActionNode
                || attachedToNode instanceof HumanTaskNode
                || attachedToNode instanceof CompositeNode
                || attachedToNode instanceof SubProcessNode) ) {
            throw new IllegalArgumentException("Compensation Boundary Event [" + ((String) eventNode.getMetaData("UniqueId"))
                    + "] must be attached to a task or sub-process.");
        }

        // - associated node is a task or subProcess
        compensationCheckPassed = false;
        if( target instanceof WorkItemNode || target instanceof HumanTaskNode
                || target instanceof CompositeContextNode || target instanceof SubProcessNode ) {
            compensationCheckPassed = true;
        } else if( target instanceof ActionNode ) {
            // ActionNodes are only acceptable when they originate from a BPMN2 ScriptTask.
            Object nodeTypeObj = ((ActionNode) target).getMetaData("NodeType");
            if( nodeTypeObj != null && nodeTypeObj.equals("ScriptTask") ) {
                compensationCheckPassed = true;
            }
        }
        if( ! compensationCheckPassed ) {
            throw new IllegalArgumentException("An Activity [" + ((String) ((NodeImpl)target).getMetaData("UniqueId"))
                    + "] associated with a Boundary Compensation Event must be a Task or a (non-Event) Sub-Process");
        }

        // - associated node does not have outgoingConnections of it's own
        compensationCheckPassed = true;
        NodeImpl targetNode = (NodeImpl) target;
        Map<String, List<org.kie.api.definition.process.Connection>> connectionsMap = targetNode.getOutgoingConnections();
        ConnectionImpl outgoingConnection = null;
        for( String connectionType : connectionsMap.keySet() ) {
            List<org.kie.api.definition.process.Connection> connections = connectionsMap.get(connectionType);
            if( connections != null && ! connections.isEmpty() ) {
                for( org.kie.api.definition.process.Connection connection : connections ) {
                    // Synthetic ("hidden") connections created by linkAssociations do not count.
                    Object hiddenObj = connection.getMetaData().get("hidden");
                    if( hiddenObj != null && ((Boolean) hiddenObj) ) {
                        continue;
                    }
                    outgoingConnection = (ConnectionImpl) connection;
                    compensationCheckPassed = false;
                    break;
                }
            }
        }
        if( ! compensationCheckPassed ) {
            throw new IllegalArgumentException("A Compensation Activity [" + ((String) targetNode.getMetaData("UniqueId"))
                    + "] may not have any outgoing connection ["
                    + (String) outgoingConnection.getMetaData("UniqueId") + "]");
        }
    }

    /**
     * Translates BPMN2 lanes into process swimlanes and maps each referenced flow
     * element to its lane, then delegates node assignment to
     * {@link #assignLanes(NodeContainer, Map)}.
     *
     * @param process the process receiving the swimlanes
     * @param lanes   lanes parsed from the BPMN2 definition (may be null)
     */
    private void assignLanes(RuleFlowProcess process, List<Lane> lanes) {
        List<String> laneNames = new ArrayList<String>();
        Map<String, String> laneMapping = new HashMap<String, String>();
        if (lanes != null) {
            for (Lane lane: lanes) {
                String name = lane.getName();
                if (name != null) {
                    Swimlane swimlane = new Swimlane();
                    swimlane.setName(name);
                    process.getSwimlaneContext().addSwimlane(swimlane);
                    laneNames.add(name);
                    for (String flowElementRef: lane.getFlowElements()) {
                        laneMapping.put(flowElementRef, name);
                    }
                }
            }
        }
        assignLanes(process, laneMapping);
    }

    /**
     * Post-parse fix-up pass over all nodes (recursing into sub-containers):
     * installs rule constraints on state nodes, wires event sub-process triggers
     * (error/escalation/compensation/condition), and validates event-node wiring.
     *
     * @param process   the owning process
     * @param container the container whose nodes are processed (process or composite)
     */
    private void postProcessNodes(RuleFlowProcess process, NodeContainer container) {
        List<String> eventSubProcessHandlers = new ArrayList<String>();
        for (Node node: container.getNodes()) {
            if (node instanceof StateNode) {
                StateNode stateNode = (StateNode) node;
                String condition = (String) stateNode.getMetaData("Condition");
                Constraint constraint = new ConstraintImpl();
                constraint.setConstraint(condition);
                constraint.setType("rule");
                for (org.kie.api.definition.process.Connection connection: stateNode.getDefaultOutgoingConnections()) {
                    stateNode.setConstraint(connection, constraint);
                }
            } else if (node instanceof NodeContainer) {
                // prepare event sub process
                if (node instanceof EventSubProcessNode) {
                    EventSubProcessNode eventSubProcessNode = (EventSubProcessNode) node;
                    Node[] nodes = eventSubProcessNode.getNodes();
                    for (Node subNode : nodes) {
                        // avoids cyclomatic complexity
                        if (subNode == null || !
                                (subNode instanceof StartNode)) {
                            continue;
                        }
                        List<Trigger> triggers = ((StartNode) subNode).getTriggers();
                        if ( triggers == null ) {
                            continue;
                        }
                        for ( Trigger trigger : triggers ) {
                            if ( trigger instanceof EventTrigger ) {
                                final List<EventFilter> filters = ((EventTrigger) trigger).getEventFilters();
                                for ( EventFilter filter : filters ) {
                                    if ( filter instanceof EventTypeFilter ) {
                                        eventSubProcessNode.addEvent((EventTypeFilter) filter);
                                        String type = ((EventTypeFilter) filter).getType();
                                        // NOTE(review): the startsWith check uses "Escalation" (no dash)
                                        // while the strip regex below uses "Escalation-" — confirm this
                                        // asymmetry is intentional.
                                        if (type.startsWith("Error-") || type.startsWith("Escalation")) {
                                            String faultCode = (String) subNode.getMetaData().get("FaultCode");
                                            String replaceRegExp = "Error-|Escalation-";
                                            final String signalType = type;

                                            // Lazily create the exception scope on the parent container.
                                            ExceptionScope exceptionScope =
                                                    (ExceptionScope) ((ContextContainer) eventSubProcessNode.getNodeContainer()).getDefaultContext(ExceptionScope.EXCEPTION_SCOPE);
                                            if (exceptionScope == null) {
                                                exceptionScope = new ExceptionScope();
                                                ((ContextContainer) eventSubProcessNode.getNodeContainer()).addContext(exceptionScope);
                                                ((ContextContainer) eventSubProcessNode.getNodeContainer()).setDefaultContext(exceptionScope);
                                            }
                                            String faultVariable = null;
                                            if (trigger.getInAssociations() != null && !trigger.getInAssociations().isEmpty()) {
                                                faultVariable = trigger.getInAssociations().get(0).getTarget();
                                            }
                                            ActionExceptionHandler exceptionHandler = new ActionExceptionHandler();
                                            DroolsConsequenceAction action = new DroolsConsequenceAction("java",
                                                    PROCESS_INSTANCE_SIGNAL_EVENT + signalType+"\", " +(faultVariable==null?"null":"kcontext.getVariable(\""+faultVariable+"\")")+");");
                                            exceptionHandler.setAction(action);
                                            exceptionHandler.setFaultVariable(faultVariable);
                                            if (faultCode != null) {
                                                String trimmedType = type.replaceFirst(replaceRegExp, "");
                                                exceptionScope.setExceptionHandler(trimmedType, exceptionHandler);
                                                eventSubProcessHandlers.add(trimmedType);
                                            } else {
                                                // NOTE(review): faultCode is null on this branch, so the
                                                // handler is registered under a null key — presumably a
                                                // catch-all; verify against ExceptionScope semantics.
                                                exceptionScope.setExceptionHandler(faultCode, exceptionHandler);
                                            }
                                        } else if( type.equals("Compensation") ) {
                                            // 1. Find the parent sub-process to this event sub-process
                                            NodeContainer parentSubProcess;
                                            NodeContainer subProcess = eventSubProcessNode.getNodeContainer();
                                            Object isForCompensationObj = eventSubProcessNode.getMetaData("isForCompensation");
                                            if( isForCompensationObj == null ) {
                                                eventSubProcessNode.setMetaData("isForCompensation", true );
                                                logger.warn( "Overriding empty or false value of \"isForCompensation\" attribute on Event Sub-Process ["
                                                        + eventSubProcessNode.getMetaData("UniqueId") + "] and setting it to true.");
                                            }
                                            if( subProcess instanceof RuleFlowProcess ) {
                                                // If jBPM deletes the process (instance) as soon as the process completes..
                                                // ..how do you expect to signal compensation on the completed process (instance)?!?
                                                throw new IllegalArgumentException("Compensation Event Sub-Processes at the process level are not supported.");
                                            }
                                            parentSubProcess = ((Node) subProcess).getNodeContainer();

                                            // 2. The event filter (never fires, purely for dumping purposes) has already been added

                                            // 3. Add compensation scope
                                            String compensationHandlerId = (String) ((CompositeNode) subProcess).getMetaData("UniqueId");
                                            addCompensationScope(process, eventSubProcessNode, parentSubProcess, compensationHandlerId);
                                        }
                                    }
                                }
                            } else if (trigger instanceof ConstraintTrigger) {
                                ConstraintTrigger constraintTrigger = (ConstraintTrigger) trigger;
                                if (constraintTrigger.getConstraint() != null) {
                                    String processId = ((RuleFlowProcess) container).getId();
                                    String type = "RuleFlowStateEventSubProcess-Event-" + processId + "-" + eventSubProcessNode.getUniqueId();
                                    EventTypeFilter eventTypeFilter = new EventTypeFilter();
                                    eventTypeFilter.setType(type);
                                    eventSubProcessNode.addEvent(eventTypeFilter);
                                }
                            }
                        }
                    } // for( Node subNode : nodes)
                }
                // Recurse into nested containers (sub-processes).
                postProcessNodes(process, (NodeContainer) node);
            } else if( node instanceof EndNode ) {
                handleIntermediateOrEndThrowCompensationEvent((EndNode) node);
            } else if( node instanceof ActionNode ) {
                handleIntermediateOrEndThrowCompensationEvent((ActionNode) node);
            } else if( node instanceof EventNode ) {
                final EventNode eventNode = (EventNode) node;
                if (!(eventNode instanceof BoundaryEventNode) && eventNode.getDefaultIncomingConnections().size() == 0) {
                    throw new IllegalArgumentException("Event node '" + node.getName() + "' [" + node.getId() + "] has no incoming connection");
                }
            }
        }
        // process fault node to disable terminate parent if there is event subprocess handler
        for (Node node: container.getNodes()) {
            if (node instanceof FaultNode) {
                FaultNode faultNode = (FaultNode) node;
                if (eventSubProcessHandlers.contains(faultNode.getFaultName())) {
                    faultNode.setTerminateParent(false);
                }
            }
        }
    }

    /**
     * Assigns each node to its swimlane (by "UniqueId" metadata, falling back to
     * the dumper-generated id), recursing into nested containers. Human tasks also
     * get the swimlane set directly.
     *
     * @param nodeContainer container whose nodes are assigned
     * @param laneMapping   flow-element unique id -> lane name
     */
    private void assignLanes(NodeContainer nodeContainer, Map<String, String> laneMapping) {
        for (Node node: nodeContainer.getNodes()) {
            String lane = null;
            String uniqueId = (String) node.getMetaData().get("UniqueId");
            if (uniqueId != null) {
                lane = laneMapping.get(uniqueId);
            } else {
                lane = laneMapping.get(XmlBPMNProcessDumper.getUniqueNodeId(node));
            }
            if (lane != null) {
                ((NodeImpl) node).setMetaData("Lane", lane);
                if (node instanceof HumanTaskNode) {
                    ((HumanTaskNode) node).setSwimlane(lane);
                }
            }
            if (node instanceof NodeContainer) {
                assignLanes((NodeContainer) node, laneMapping);
            }
        }
    }

    /**
     * Builds a {@link Constraint} for a sequence flow, or returns null when the
     * flow has no expression. Defaults: empty name, "code" type; the connection is
     * marked default when it matches the node's "Default" metadata.
     *
     * @param connection the parsed BPMN2 sequence flow
     * @param node       the source node (provides the "Default" metadata)
     * @return a populated constraint, or null if the flow has no expression
     */
    private static Constraint buildConstraint(SequenceFlow connection, NodeImpl node) {
        if (connection.getExpression() == null) {
            return null;
        }

        Constraint constraint = new ConstraintImpl();
        String defaultConnection = (String) node.getMetaData("Default");
        if (defaultConnection != null && defaultConnection.equals(connection.getId())) {
            constraint.setDefault(true);
        }
        if (connection.getName() != null) {
            constraint.setName(connection.getName());
        } else {
            constraint.setName("");
        }
        if (connection.getType() != null) {
            constraint.setType(connection.getType());
        } else {
            constraint.setType("code");
        }
        if (connection.getLanguage() != null) {
            constraint.setDialect(connection.getLanguage());
        }
        if (connection.getExpression() != null) {
            constraint.setConstraint(connection.getExpression());
        }
        constraint.setPriority(connection.getPriority());
        return constraint;
    }

    /**
     * Registers a compensation handler node under the parent container's
     * {@link CompensationScope}, creating and installing the scope if the
     * container does not have one yet. Also flags the process metadata with
     * "Compensation" so the runtime knows compensation is in play.
     *
     * @param process               the owning process (flagged with "Compensation")
     * @param node                  the handler node (e.g. event sub-process or compensation activity)
     * @param parentContainer       must be a {@link ContextContainer} (asserted)
     * @param compensationHandlerId unique id the handler is registered under
     * @throws IllegalArgumentException if a handler is already registered for the id
     */
    protected static void addCompensationScope(final RuleFlowProcess process, final Node node,
            final org.kie.api.definition.process.NodeContainer parentContainer, final String compensationHandlerId) {
        process.getMetaData().put("Compensation", true);

        assert parentContainer instanceof ContextContainer
                : "Expected parent node to be a CompositeContextNode, not a " + parentContainer.getClass().getSimpleName();
        ContextContainer contextContainer = (ContextContainer) parentContainer;

        CompensationScope scope = null;
        boolean addScope = false;
        if (contextContainer.getContexts(CompensationScope.COMPENSATION_SCOPE) == null) {
            addScope = true;
        } else {
            scope = (CompensationScope) contextContainer.getContexts(CompensationScope.COMPENSATION_SCOPE).get(0);
            if (scope == null) {
                addScope = true;
            }
        }
        if (addScope) {
            scope = new CompensationScope();
            contextContainer.addContext(scope);
            contextContainer.setDefaultContext(scope);
            scope.setContextContainer(contextContainer);
        }

        CompensationHandler handler = new CompensationHandler();
        handler.setNode(node);
        if( scope.getExceptionHandler(compensationHandlerId) != null ) {
            throw new IllegalArgumentException( "More than one compensation handler per node ("
                    + compensationHandlerId + ")" + " is not supported!");
        }
        scope.setExceptionHandler(compensationHandlerId, handler);
    }

    /**
     * Converts an intermediate-throw or end compensation event node into the
     * runtime action that signals compensation: an empty "compensation-activityRef"
     * means implicit/general compensation of the enclosing (sub-)process, a
     * non-empty value targets a specific activity. The metadata entry is consumed
     * (removed) here.
     *
     * @param throwEventNode an {@link ActionNode} (intermediate throw) or {@link EndNode}
     */
    protected void handleIntermediateOrEndThrowCompensationEvent(ExtendedNodeImpl throwEventNode ) {
        if( throwEventNode.getMetaData("compensation-activityRef") != null ) {
            String activityRef = (String) throwEventNode.getMetaData().remove("compensation-activityRef");

            NodeContainer nodeParent = (NodeContainer) throwEventNode.getNodeContainer();
            if( nodeParent instanceof EventSubProcessNode ) {
                // If the enclosing event sub-process is itself a compensation handler,
                // the visibility scope is its parent, not the event sub-process.
                boolean compensationEventSubProcess = false;
                List<Trigger> startTriggers = ((EventSubProcessNode) nodeParent).findStartNode().getTriggers();
                CESP_CHECK: for( Trigger trigger : startTriggers ) {
                    if( trigger instanceof EventTrigger ) {
                        for( EventFilter filter : ((EventTrigger) trigger).getEventFilters() ) {
                            if( ((EventTypeFilter) filter).getType().equals("Compensation") ) {
                                compensationEventSubProcess = true;
                                break CESP_CHECK;
                            }
                        }
                    }
                }
                if( compensationEventSubProcess ) {
                    // BPMN2 spec, p. 252, p. 248: intermediate and end compensation event visibility scope
                    nodeParent = (NodeContainer) ((NodeImpl) nodeParent).getNodeContainer();
                }
            }
            String parentId;
            if( nodeParent instanceof RuleFlowProcess ) {
                parentId = ((RuleFlowProcess) nodeParent).getId();
            } else {
                parentId = (String) ((NodeImpl) nodeParent).getMetaData("UniqueId");
            }

            String compensationEvent;
            if( activityRef.length() == 0 ) {
                // general/implicit compensation
                compensationEvent = CompensationScope.IMPLICIT_COMPENSATION_PREFIX + parentId;
            } else {
                // specific compensation
                compensationEvent = activityRef;
            }

            DroolsConsequenceAction compensationAction = new DroolsConsequenceAction("java",
                    PROCESS_INSTANCE_SIGNAL_EVENT + "Compensation\", \"" + compensationEvent + "\");");
            if( throwEventNode instanceof ActionNode ) {
                ((ActionNode) throwEventNode).setAction(compensationAction);
            } else if( throwEventNode instanceof EndNode ) {
                List<DroolsAction> actions = new ArrayList<DroolsAction>();
                actions.add(compensationAction);
                ((EndNode) throwEventNode).setActions(EndNode.EVENT_NODE_ENTER, actions);
            }
        }
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package services;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.logging.Logger;

import org.apache.commons.lang.StringUtils;
import org.apache.nuvem.cloud.data.DocumentService;
import org.apache.nuvem.cloud.user.User;
import org.apache.nuvem.cloud.user.UserService;
import org.apache.nuvem.cloud.xmpp.api.JID;
import org.apache.tuscany.sca.data.collection.Entry;
import org.apache.tuscany.sca.data.collection.NotFoundException;
import org.oasisopen.sca.annotation.Reference;
import org.oasisopen.sca.annotation.Scope;

/**
 * Per-user shopping cart backed by a {@link DocumentService} document store.
 * The cart is persisted as a {@code Map<String, Item>} under the key
 * {@code "cart-<userId>"}; anonymous users share the {@code "anonymous"} id.
 */
@Scope("COMPOSITE")
public class ShoppingCartManager implements ShoppingCart {

    private static final Logger log = Logger
            .getLogger(ShoppingCartManager.class.getName());

    /** User id used when no authenticated user can be resolved. */
    private static final String ANONYMOUS = "anonymous";

    @Reference
    private DocumentService documentService;

    @Reference
    private UserService userService;

    @Reference
    private ShipmentService shipmentService;

    /** Returns all items in the current user's cart as key/value entries. */
    @SuppressWarnings("unchecked")
    public Entry<String, Item>[] getAll() {
        Map<String, Item> cart = getUserShoppingCart();
        Entry<String, Item>[] entries = new Entry[cart.size()];
        int i = 0;
        for (Map.Entry<String, Item> e : cart.entrySet()) {
            entries[i++] = new Entry<String, Item>(e.getKey(), e.getValue());
        }
        return entries;
    }

    /**
     * Returns the item stored under {@code key}.
     *
     * @throws NotFoundException if the cart has no item for the key
     */
    public Item get(String key) throws NotFoundException {
        Map<String, Item> cart = getUserShoppingCart();
        Item item = cart.get(key);
        if (item == null) {
            throw new NotFoundException(key);
        }
        return item;
    }

    /**
     * Adds an item to the cart. A null/empty key gets a generated one.
     *
     * @return the key the item was stored under
     */
    public String post(String key, Item item) {
        Map<String, Item> cart = getUserShoppingCart();
        if (key == null || key.isEmpty()) {
            key = this.generateItemKey();
        }
        // add to the cart map, then persist the whole cart document
        cart.put(key, item);
        documentService.post(getCartKey(), cart);
        return key;
    }

    /**
     * Replaces an existing item.
     *
     * @throws NotFoundException if the key is not already present in the cart
     */
    public void put(String key, Item item) throws NotFoundException {
        Map<String, Item> cart = getUserShoppingCart();
        if (!cart.containsKey(key)) {
            throw new NotFoundException(key);
        }
        // add to the cart map, then persist the whole cart document
        cart.put(key, item);
        documentService.put(getCartKey(), cart);
    }

    /**
     * Removes one item, or — when {@code key} is null/empty — deletes the whole
     * cart document.
     *
     * @throws NotFoundException if a specific key was given but not found
     */
    public void delete(String key) throws NotFoundException {
        if (key == null || key.isEmpty()) {
            documentService.delete(getCartKey());
        } else {
            Map<String, Item> cart = getUserShoppingCart();
            Item item = cart.remove(key);
            if (item == null) {
                throw new NotFoundException(key);
            }
            documentService.put(getCartKey(), cart);
        }
    }

    /** Querying is not supported by this implementation. */
    public Entry<String, Item>[] query(String queryString) {
        throw new UnsupportedOperationException("Operation not supported !");
    }

    /**
     * Sums the item prices, prefixed with the currency symbol of the first item.
     * NOTE(review): money is accumulated in a {@code double}; the interface fixes
     * the String return, but BigDecimal would avoid rounding artifacts.
     */
    public String getTotal() {
        double total = 0;
        String currencySymbol = "";
        Map<String, Item> cart = getUserShoppingCart();
        if (!cart.isEmpty()) {
            Item item = cart.values().iterator().next();
            currencySymbol = item.getCurrencySymbol();
        }
        for (Item item : cart.values()) {
            total += item.getPrice();
        }
        return currencySymbol + String.valueOf(total);
    }

    /**
     * Utility functions
     */

    /**
     * Loads the current user's cart from the document store, creating and
     * persisting an empty cart on first access. Never returns null.
     */
    private Map<String, Item> getUserShoppingCart() {
        String userCartKey = getCartKey();
        HashMap<String, Item> cart;
        try {
            // The store is typeless; the cart document is known to be a HashMap.
            @SuppressWarnings("unchecked")
            HashMap<String, Item> stored = (HashMap<String, Item>) documentService.get(userCartKey);
            cart = stored;
        } catch (NotFoundException e) {
            cart = new HashMap<String, Item>();
            documentService.post(userCartKey, cart);
        }
        return cart;
    }

    /** Resolves the current user's id, falling back to {@link #ANONYMOUS}. */
    private String getUserId() {
        String userId = null;
        if (userService != null) {
            try {
                User user = userService.getCurrentUser();
                userId = user.getUserId();
            } catch (Exception e) {
                // best-effort: log instead of printing the stack trace and fall back
                log.warning("Could not resolve current user: " + e);
            }
        }
        if (userId == null || userId.length() == 0) {
            userId = ANONYMOUS;
        }
        return userId;
    }

    /** Document-store key for the current user's cart. */
    private String getCartKey() {
        return "cart-" + this.getUserId();
    }

    /** Generates a unique key for a newly posted item. */
    private String generateItemKey() {
        return getCartKey() + "-item-" + UUID.randomUUID().toString();
    }

    /**
     * Ships all cart items and registers {@code jid} for shipment updates.
     * Falls back to the current user's email when no JID is supplied.
     *
     * @return tracking reference, empty string for an empty cart, or "shipment error"
     */
    @Override
    public String shipItems(String jid) {
        // Fetch the cart once instead of re-reading the document store per check.
        Map<String, Item> cart = getUserShoppingCart();
        if (cart == null || cart.isEmpty()) {
            return StringUtils.EMPTY;
        }
        if (jid == null) {
            log.warning("using current user's email address for shipment updates");
            jid = userService.getCurrentUser().getEmail();
        }
        List<Item> items = new ArrayList<Item>(cart.values());
        try {
            return shipmentService.shipItemsAndRegisterForUpdates(
                    Address.DUMMY_ADDRESS, new JID(jid), items);
        } catch (ShipmentException e) {
            log.severe("error occured during shipment");
            return "shipment error";
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.facebook; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.camel.Category; import org.apache.camel.Consumer; import org.apache.camel.NoTypeConversionAvailableException; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.component.facebook.config.FacebookEndpointConfiguration; import org.apache.camel.component.facebook.config.FacebookNameStyle; import org.apache.camel.component.facebook.data.FacebookMethodsType; import org.apache.camel.component.facebook.data.FacebookPropertiesHelper; import org.apache.camel.spi.Metadata; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.spi.UriPath; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.support.PropertyBindingSupport; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToGetMethod; import static 
org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.convertToSearchMethod; import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getCandidateMethods; import static org.apache.camel.component.facebook.data.FacebookMethodsTypeHelper.getMissingProperties; import static org.apache.camel.component.facebook.data.FacebookPropertiesHelper.getEndpointPropertyNames; /** * Send requests to Facebook APIs supported by Facebook4J. * * It allows producing messages to retrieve, add, and delete posts, likes, comments, photos, albums, videos, photos, * checkins, locations, links, etc. It also supports APIs that allow polling for posts, users, checkins, groups, * locations, etc. */ @UriEndpoint(firstVersion = "2.14.0", scheme = "facebook", title = "Facebook", syntax = "facebook:methodName", category = { Category.SOCIAL }) public class FacebookEndpoint extends DefaultEndpoint implements FacebookConstants { private static final Logger LOG = LoggerFactory.getLogger(FacebookEndpoint.class); private FacebookNameStyle nameStyle; @UriPath(name = "methodName", description = "What operation to perform") @Metadata(required = true) private String method; private FacebookMethodsType methodName; @UriParam private FacebookEndpointConfiguration configuration; @UriParam private String inBody; // candidate methods based on method name and endpoint configuration private List<FacebookMethodsType> candidates; public FacebookEndpoint(String uri, FacebookComponent facebookComponent, String remaining, FacebookEndpointConfiguration configuration) throws NoTypeConversionAvailableException { super(uri, facebookComponent); this.configuration = configuration; this.method = remaining; } @Override public Producer createProducer() throws Exception { return new FacebookProducer(this); } @Override public Consumer createConsumer(Processor processor) throws Exception { // make sure inBody is not set for consumers if (inBody != null) { throw new IllegalArgumentException("Option 
inBody is not supported for consumer endpoint"); } final FacebookConsumer consumer = new FacebookConsumer(this, processor); // also set consumer.* properties configureConsumer(consumer); return consumer; } @Override public void configureProperties(Map<String, Object> options) { super.configureProperties(options); // set configuration properties first try { if (configuration == null) { configuration = new FacebookEndpointConfiguration(); } PropertyBindingSupport.bindProperties(getCamelContext(), configuration, options); } catch (Exception e) { throw new IllegalArgumentException(e.getMessage(), e); } // extract reading properties FacebookPropertiesHelper.configureReadingProperties(configuration, options); // validate configuration configuration.validate(); // validate and initialize state initState(); } private void initState() { // get endpoint property names final Set<String> arguments = new HashSet<>(); arguments.addAll(getEndpointPropertyNames(getCamelContext(), configuration)); // add inBody argument for producers if (inBody != null) { arguments.add(inBody); } final String[] argNames = arguments.toArray(new String[arguments.size()]); candidates = new ArrayList<>(); candidates.addAll(getCandidateMethods(method, argNames)); if (!candidates.isEmpty()) { // found an exact name match, allows disambiguation if needed this.nameStyle = FacebookNameStyle.EXACT; } else { // also search for long forms of method name, both get* and search* // Note that this set will be further sorted by Producers and Consumers // producers will prefer get* forms, and consumers should prefer search* forms candidates.addAll(getCandidateMethods(convertToGetMethod(method), argNames)); if (!candidates.isEmpty()) { this.nameStyle = FacebookNameStyle.GET; } int nGetMethods = candidates.size(); candidates.addAll(getCandidateMethods(convertToSearchMethod(method), argNames)); // error if there are no candidates if (candidates.isEmpty()) { throw new IllegalArgumentException( String.format("No matching 
operation for %s, with arguments %s", method, arguments)); } if (nameStyle == null) { // no get* methods found nameStyle = FacebookNameStyle.SEARCH; } else if (candidates.size() > nGetMethods) { // get* and search* methods found nameStyle = FacebookNameStyle.GET_AND_SEARCH; } } // log missing/extra properties for debugging if (LOG.isDebugEnabled()) { final Set<String> missing = getMissingProperties(method, nameStyle, arguments); if (!missing.isEmpty()) { LOG.debug("Method {} could use one or more properties from {}", method, missing); } } } public FacebookEndpointConfiguration getConfiguration() { return configuration; } public List<FacebookMethodsType> getCandidates() { return Collections.unmodifiableList(candidates); } public String getInBody() { return inBody; } public String getMethod() { return method; } public FacebookNameStyle getNameStyle() { return nameStyle; } /** * Sets the name of a parameter to be passed in the exchange In Body */ public void setInBody(String inBody) { // validate property name ObjectHelper.notNull(inBody, "inBody"); if (!FacebookPropertiesHelper.getValidEndpointProperties().contains(inBody)) { throw new IllegalArgumentException("Unknown property " + inBody); } this.inBody = inBody; } /** * Sets the {@link FacebookEndpointConfiguration} to use * * @param configuration the {@link FacebookEndpointConfiguration} to use */ public void setConfiguration(FacebookEndpointConfiguration configuration) { this.configuration = configuration; } }
package edu.umass.cs.pig.test;

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Map;
import java.util.Properties;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataBag;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.newplan.Operator;
import org.apache.pig.test.MiniCluster;
import org.apache.pig.test.SessionIdentifierGenerator;
import org.apache.pig.test.Util;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests example-data generation ({@code getExamples2}, i.e. illustrate) for Pig JOIN
 * queries over a set of small generated input files (random pairs, sequential pairs,
 * and fixed-prefix rows).
 */
public class TestExGen4Join {

    /** Shared local-mode Pig context used by every test. */
    static PigContext pigContext = new PigContext(ExecType.LOCAL, new Properties());

    private final Log log = LogFactory.getLog(getClass());

    // private OSValidator z3 = OSValidator.get();
    // private Z3Context z4 = Z3Context.get();

    static int MAX = 100;
    /** Number of rows written to each data file. */
    static int MIN = 10;
    /** Quoted paths of the generated data files, ready for interpolation into Pig Latin. */
    static String A, B, C, D, E, F, G;
    /** Running counter shared by the sequential/prefixed writers. */
    static int start = 0;
    static File fileA, fileB, fileC, fileD, fileE, fileF, fileG;

    // Instance initializer: connect the shared context before each test instance runs.
    {
        try {
            pigContext.connect();
        } catch (ExecException e) {
            // Best-effort: the individual test will fail with a clearer error if the
            // connection is actually unusable.
            e.printStackTrace();
        }
    }

    /** Creates all temp data files once for the whole class. */
    @BeforeClass
    public static void oneTimeSetup() throws Exception {
        fileA = File.createTempFile("dataA", ".dat");
        fileB = File.createTempFile("dataB", ".dat");
        fileC = File.createTempFile("dataC", ".dat");
        fileD = File.createTempFile("dataD", ".dat");
        fileE = File.createTempFile("dataE", ".dat");
        fileF = File.createTempFile("dataF", ".dat");
        fileG = File.createTempFile("dataG", ".dat");

        writeData(fileA);
        writeData(fileB);
        writeData2(fileC);
        writeData3(fileD);
        writeData3(fileE);
        writeData4(fileF);
        writeData5(fileG);

        fileA.deleteOnExit();
        fileB.deleteOnExit();
        fileC.deleteOnExit();
        fileD.deleteOnExit();
        fileE.deleteOnExit();
        fileF.deleteOnExit();
        fileG.deleteOnExit();

        A = "'" + fileA.getPath() + "'";
        B = "'" + fileB.getPath() + "'";
        C = "'" + fileC.getPath() + "'";
        D = "'" + fileD.getPath() + "'";
        E = "'" + fileE.getPath() + "'";
        F = "'" + fileF.getPath() + "'";
        G = "'" + fileG.getPath() + "'";

        System.out.println("A : " + A + "\n" + "B : " + B + "\n" + "C : " + C
                + "\n" + "D : " + D + "\n" + "E : " + E + "\n");
        System.out.println("Test data created.");
    }

    /** Writes {@link #MIN} rows of two random ints in [0, 10). */
    private static void writeData(File dataFile) throws Exception {
        writeRandomPairs(dataFile, 10);
    }

    /** Writes {@link #MIN} rows of two random ints in [0, 100). */
    private static void writeData2(File dataFile) throws Exception {
        writeRandomPairs(dataFile, 100);
    }

    /** Writes {@link #MIN} rows of consecutive ints taken from the shared counter. */
    private static void writeData3(File dataFile) throws Exception {
        writeSequentialPairs(dataFile);
    }

    /** Writes {@link #MIN} rows whose first column is the literal "ba". */
    private static void writeData4(File dataFile) throws Exception {
        writePrefixedRows(dataFile, "ba");
    }

    /** Writes {@link #MIN} rows whose first column is the literal "ab". */
    private static void writeData5(File dataFile) throws Exception {
        writePrefixedRows(dataFile, "ab");
    }

    /**
     * Shared writer for the random-pair formats: MIN tab-separated rows of two random
     * ints in [0, bound). try-with-resources guarantees the stream is closed even if a
     * write fails (the previous versions leaked the stream on exception).
     */
    private static void writeRandomPairs(File dataFile, int bound) throws Exception {
        Random rand = new Random();
        try (FileOutputStream dat = new FileOutputStream(dataFile)) {
            for (int i = 0; i < MIN; i++) {
                dat.write((rand.nextInt(bound) + "\t" + rand.nextInt(bound) + "\n").getBytes());
            }
        }
    }

    /** Shared writer: MIN rows of two consecutive values of the shared counter. */
    private static void writeSequentialPairs(File dataFile) throws Exception {
        try (FileOutputStream dat = new FileOutputStream(dataFile)) {
            for (int i = 0; i < MIN; i++) {
                dat.write((start++ + "\t" + start++ + "\n").getBytes());
            }
        }
    }

    /** Shared writer: MIN rows of a fixed string column plus the shared counter. */
    private static void writePrefixedRows(File dataFile, String prefix) throws Exception {
        try (FileOutputStream dat = new FileOutputStream(dataFile)) {
            for (int i = 0; i < MIN; i++) {
                dat.write((prefix + "\t" + start++ + "\n").getBytes());
            }
        }
    }

    /** Join of two random-data relations. */
    @Test
    public void testJoin() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);

        pigServer.registerQuery("A1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + B + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("E = join A1 by x, B1 by x;");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("E");
        assertTrue(derivedData != null);
    }

    /** Self-join: both relations load the same file. */
    @Test
    public void testJoin2() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);

        pigServer.registerQuery("A1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("E = join A1 by x, B1 by x;");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("E");
        assertTrue(derivedData != null);
    }

    /** Join across different random value ranges (few or no natural matches). */
    @Test
    public void testJoin3() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);

        pigServer.registerQuery("A1 = load " + C + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("E = join A1 by x, B1 by x;");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("E");
        assertTrue(derivedData != null);
    }

    /** Join followed by a projecting FOREACH, in batch mode. */
    @Test
    public void testJoin4() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);
        pigServer.setBatchOn();

        pigServer.registerQuery("A1 = load " + C + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B = join A1 by x, B1 by x;");
        pigServer.registerQuery("D = foreach B generate A1::x as a1x, A1::y as a1y, B1::y as b1y;");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("D");
        assertTrue(derivedData != null);
    }

    /** Multi-key join with a STORE, illustrating the whole batch script. */
    @Test
    public void testJoin5() throws IOException, ExecException {
        File out = File.createTempFile("testFilterGroupCountStoreOutput", "");
        out.deleteOnExit();
        out.delete();

        PigServer pigServer = new PigServer(pigContext);
        pigServer.setBatchOn();

        pigServer.registerQuery("A1 = load " + C + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B = join A1 by (x, y), B1 by (x, y);");
        pigServer.registerQuery("store B into '" + out.getAbsolutePath() + "';");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2(null);
        assertTrue(derivedData != null);
    }

    /*bug in illustrate
     * Would report the following error trace:
     * org.apache.pig.backend.executionengine.ExecException: ERROR 1071: Cannot convert a tuple to an Integer
    at org.apache.pig.data.DataType.toInteger(DataType.java:582)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POCast.getNext(POCast.java:150)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator.getNext(PhysicalOperator.java:328)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach.processPlan(POForEach.java:332)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POForEach.getNext(POForEach.java:284)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator.processInput(PhysicalOperator.java:290)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLocalRearrange.getNext(POLocalRearrange.java:256)
    at org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POUnion.getNext(POUnion.java:165)
    at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapBase.runPipeline(PigMapBase.java:261)
    at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapBase.map(PigMapBase.java:256)
    at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigMapBase.map(PigMapBase.java:58)
    at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:144)
    at org.apache.pig.pen.LocalMapReduceSimulator.launchPig(LocalMapReduceSimulator.java:205)
    at org.apache.pig.pen.ExampleGenerator.getData(ExampleGenerator.java:257)
    at org.apache.pig.pen.ExampleGenerator.getData(ExampleGenerator.java:238)
    at org.apache.pig.pen.LineageTrimmingVisitor.init(LineageTrimmingVisitor.java:103)
    at org.apache.pig.pen.LineageTrimmingVisitor.<init>(LineageTrimmingVisitor.java:98)
    at org.apache.pig.pen.ExampleGenerator.getExamples(ExampleGenerator.java:185)
    at org.apache.pig.PigServer.getExamples(PigServer.java:1258)
    at org.apache.pig.tools.grunt.GruntParser.processIllustrate(GruntParser.java:698)
    at org.apache.pig.tools.pigscript.parser.PigScriptParser.Illustrate(PigScriptParser.java:591)
    at org.apache.pig.tools.pigscript.parser.PigScriptParser.parse(PigScriptParser.java:306)
    at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:188)
    at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:164)
    at org.apache.pig.tools.grunt.Grunt.run(Grunt.java:67)
    at org.apache.pig.Main.run(Main.java:487)
    at org.apache.pig.Main.main(Main.java:108)
    2012-04-25 15:18:49,609 [main] ERROR org.apache.pig.tools.grunt.Grunt - ERROR 2997: Encountered IOException. ExecException : Cannot convert a tuple to an Integer
    Details at logfile: /nfs/ktl/home1/kaituo/pig_1335381455900.log
     */
    // @Test
    // public void testJoin6() throws IOException, ExecException {
    // File out = File.createTempFile("testFilterGroupCountStoreOutput", "");
    // out.deleteOnExit();
    // out.delete();
    //
    // PigServer pigServer = new PigServer(pigContext);
    // pigServer.setBatchOn();
    //
    // pigServer.registerQuery("A1 = load " + C + " as (x:int, y:int);");
    // pigServer.registerQuery("B1 = load " + A + " as (x:int, w:int);");
    //
    // pigServer.registerQuery("B = join A1 by x, B1 by x;");
    //
    // pigServer.registerQuery("C = filter B by A1::x < B1::w;");
    //
    // Map<Operator, DataBag> derivedData = pigServer.getExamples("C");
    //
    // assertTrue(derivedData != null);
    // }

    /** Multi-key join over the sequential data sets, with a STORE. */
    @Test
    public void testJoin7() throws IOException, ExecException {
        File out = File.createTempFile("testFilterGroupCountStoreOutput", "");
        out.deleteOnExit();
        out.delete();

        PigServer pigServer = new PigServer(pigContext);
        pigServer.setBatchOn();

        pigServer.registerQuery("A1 = load " + D + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + E + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B = join A1 by (x, y), B1 by (x, y);");
        pigServer.registerQuery("store B into '" + out.getAbsolutePath() + "';");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2(null);
        assertTrue(derivedData != null);
    }

    /** Single-key join over the sequential data sets, illustrating the join alias. */
    @Test
    public void testJoin8() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);
        pigServer.setBatchOn();

        pigServer.registerQuery("A1 = load " + D + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + E + " using PigStorage() as (x:int, w:int);");
        pigServer.registerQuery("B = join A1 by x, B1 by x;");
        //pigServer.registerQuery("store B into '" + out.getAbsolutePath() + "';");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("B");
        assertTrue(derivedData != null);
    }

    /** Join on chararray keys that never match ("ba" vs "ab"). */
    @Test
    public void testJoin9() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);

        pigServer.registerQuery("A=load " + F + " using PigStorage() as (x : chararray, y : int);");
        pigServer.registerQuery("C=load " + G + " using PigStorage() as (x : chararray, y : int);");
        String query = "B = join A by x, C by x;";
        log.info(query);
        pigServer.registerQuery(query);

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("B");
        assertTrue(derivedData != null);
    }

    /** Join followed by DISTINCT. */
    @Test
    public void testJoin10() throws IOException, ExecException {
        PigServer pigServer = new PigServer(pigContext);

        pigServer.registerQuery("A1 = load " + A + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("B1 = load " + B + " using PigStorage() as (x:int, y:int);");
        pigServer.registerQuery("C = join A1 by x, B1 by x;");
        pigServer.registerQuery("E = distinct C;");

        Map<Operator, DataBag> derivedData = pigServer.getExamples2("E");
        assertTrue(derivedData != null);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal;

import org.apache.ignite.*;
import org.apache.ignite.cluster.*;
import org.apache.ignite.compute.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.resources.*;
import org.apache.ignite.testframework.junits.common.*;

import java.io.*;
import java.util.*;
import java.util.concurrent.atomic.*;

/**
 * Test for various job callback annotations.
 */
@GridCommonTest(group = "Kernal Self")
public class GridContinuousJobAnnotationSelfTest extends GridCommonAbstractTest {
    /** Set to force the first job execution to fail (and thus trigger failover). */
    private static final AtomicBoolean fail = new AtomicBoolean();

    /** Number of {@code @ComputeJobAfterSend} callbacks observed. */
    private static final AtomicInteger afterSendCnt = new AtomicInteger();

    /** Number of {@code @ComputeJobBeforeFailover} callbacks observed. */
    private static final AtomicInteger beforeFailoverCnt = new AtomicInteger();

    /** First unexpected error seen by a job, if any. */
    private static final AtomicReference<Exception> err = new AtomicReference<>();

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(gridName);

        // Jobs must be marshalled so the callback annotations fire on real copies.
        c.setMarshalLocalJobs(false);

        return c;
    }

    /**
     * @throws Exception If test failed.
     */
    public void testJobAnnotation() throws Exception {
        testContinuousJobAnnotation(TestJob.class);
    }

    /**
     * @throws Exception If test failed.
     */
    public void testJobChildAnnotation() throws Exception {
        testContinuousJobAnnotation(TestJobChild.class);
    }

    /**
     * Runs the task with the given job class on a two-node grid and verifies the
     * after-send and before-failover callbacks fired the expected number of times.
     *
     * @param jobCls Job class.
     * @throws Exception If test failed.
     */
    public void testContinuousJobAnnotation(Class<?> jobCls) throws Exception {
        try {
            Ignite ignite = startGrid(0);
            startGrid(1);

            fail.set(true);

            ignite.compute().execute(TestTask.class, jobCls);

            Exception e = err.get();

            if (e != null)
                throw e;
        }
        finally {
            stopGrid(0);
            stopGrid(1);
        }

        // The job is sent twice (initial send + failover) but fails over only once.
        assertEquals(2, afterSendCnt.getAndSet(0));
        assertEquals(1, beforeFailoverCnt.getAndSet(0));
    }

    /** Task that maps a single reflectively-created job and fails it over once. */
    @SuppressWarnings({"PublicInnerClass", "unused"})
    public static class TestTask implements ComputeTask<Object, Object> {
        /** */
        @TaskContinuousMapperResource
        private ComputeTaskContinuousMapper mapper;

        /** {@inheritDoc} */
        @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid, Object arg) {
            try {
                // getDeclaredConstructor().newInstance() instead of the deprecated
                // Class.newInstance(), which silently rethrows checked exceptions.
                mapper.send(((Class<ComputeJob>)arg).getDeclaredConstructor().newInstance());
            }
            catch (Exception e) {
                throw new IgniteException("Job instantiation failed.", e);
            }

            return null;
        }

        /** {@inheritDoc} */
        @Override public ComputeJobResultPolicy result(ComputeJobResult res, List<ComputeJobResult> received)
            throws IgniteException {
            if (res.getException() != null) {
                if (res.getException() instanceof ComputeUserUndeclaredException)
                    throw new IgniteException("Job threw unexpected exception.", res.getException());

                // Expected test exception: fail the job over to the other node.
                return ComputeJobResultPolicy.FAILOVER;
            }

            return ComputeJobResultPolicy.WAIT;
        }

        /** {@inheritDoc} */
        @Override public Object reduce(List<ComputeJobResult> results) throws IgniteException {
            assert results.size() == 1 : "Unexpected result count: " + results.size();

            return null;
        }
    }

    /**
     * Job that records its callback invocations; {@code flag} must be {@code true}
     * on execution (reset by before-failover, cleared by after-send).
     */
    private static class TestJob extends ComputeJobAdapter {
        /** */
        private boolean flag = true;

        /** */
        TestJob() {
            X.println("Constructing TestJob [this=" + this + ", identity=" + System.identityHashCode(this) + "]");
        }

        /** */
        @ComputeJobAfterSend
        private void afterSend() {
            X.println("AfterSend start TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            afterSendCnt.incrementAndGet();

            flag = false;

            X.println("AfterSend end TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");
        }

        /** */
        @ComputeJobBeforeFailover
        private void beforeFailover() {
            X.println("BeforeFailover start TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            beforeFailoverCnt.incrementAndGet();

            flag = true;

            X.println("BeforeFailover end TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");
        }

        /** {@inheritDoc} */
        @Override public Serializable execute() throws IgniteException {
            X.println("Execute TestJob [this=" + this + ", identity=" + System.identityHashCode(this) +
                ", flag=" + flag + "]");

            if (!flag) {
                String msg = "Flag is false on execute [this=" + this + ", identity=" +
                    System.identityHashCode(this) + ", flag=" + flag + "]";

                X.println(msg);

                err.compareAndSet(null, new Exception(msg));
            }

            if (fail.get()) {
                fail.set(false);

                throw new IgniteException("Expected test exception.");
            }

            return null;
        }
    }

    /**
     *
     */
    private static class TestJobChild extends TestJob {
        /**
         * Required for reflectional creation.
         */
        TestJobChild() {
            // No-op.
        }
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.internal;

import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AlreadyExpiredException;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue;
import static org.elasticsearch.index.mapper.MapperBuilders.ttl;

/**
 * Mapper for the {@code _ttl} metadata field: stores the document's expiration
 * timestamp (indexing timestamp + ttl) as a stored, doc-indexed long.
 */
public class TTLFieldMapper extends LongFieldMapper implements RootMapper {

    public static final String NAME = "_ttl";
    public static final String CONTENT_TYPE = "_ttl";

    public static class Defaults extends LongFieldMapper.Defaults {
        public static final String NAME = TTLFieldMapper.CONTENT_TYPE;

        public static final MappedFieldType TTL_FIELD_TYPE = new TTLFieldType();

        static {
            TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
            TTL_FIELD_TYPE.setStored(true);
            TTL_FIELD_TYPE.setTokenized(false);
            TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT);
            TTL_FIELD_TYPE.setNames(new MappedFieldType.Names(NAME));
            TTL_FIELD_TYPE.freeze();
        }

        public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED;
        // -1 means "no default ttl configured"
        public static final long DEFAULT = -1;
    }

    public static class Builder extends NumberFieldMapper.Builder<Builder, TTLFieldMapper> {

        private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED;
        private long defaultTTL = Defaults.DEFAULT;

        public Builder() {
            super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT);
        }

        public Builder enabled(EnabledAttributeMapper enabled) {
            this.enabledState = enabled;
            return builder;
        }

        public Builder defaultTTL(long defaultTTL) {
            this.defaultTTL = defaultTTL;
            return builder;
        }

        @Override
        public TTLFieldMapper build(BuilderContext context) {
            setupFieldType(context);
            return new TTLFieldMapper(fieldType, enabledState, defaultTTL, ignoreMalformed(context), coerce(context), fieldDataSettings, context.indexSettings());
        }

        @Override
        protected NamedAnalyzer makeNumberAnalyzer(int precisionStep) {
            return NumericLongAnalyzer.buildNamedAnalyzer(precisionStep);
        }

        @Override
        protected int maxPrecisionStep() {
            return 64;
        }
    }

    /** Parses the {@code _ttl} mapping section ("enabled" flag and "default" time value). */
    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            TTLFieldMapper.Builder builder = ttl();
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = Strings.toUnderscoreCase(entry.getKey());
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    EnabledAttributeMapper enabledState = nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED;
                    builder.enabled(enabledState);
                    iterator.remove();
                } else if (fieldName.equals("default")) {
                    TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null);
                    if (ttlTimeValue != null) {
                        builder.defaultTTL(ttlTimeValue.millis());
                    }
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    static final class TTLFieldType extends LongFieldType {

        public TTLFieldType() {
        }

        protected TTLFieldType(TTLFieldType ref) {
            super(ref);
        }

        @Override
        public LongFieldType clone() {
            return new TTLFieldType(this);
        }

        // Overrides valueForSearch to display live value of remaining ttl
        @Override
        public Object valueForSearch(Object value) {
            long now;
            SearchContext searchContext = SearchContext.current();
            if (searchContext != null) {
                // use the search-scoped "now" for a consistent value across the request
                now = searchContext.nowInMillis();
            } else {
                now = System.currentTimeMillis();
            }
            long val = value(value);
            return val - now;
        }
    }

    private EnabledAttributeMapper enabledState;
    private long defaultTTL;

    public TTLFieldMapper(Settings indexSettings) {
        this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, Defaults.IGNORE_MALFORMED, Defaults.COERCE, null, indexSettings);
    }

    protected TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(fieldType, false, ignoreMalformed, coerce, fieldDataSettings, indexSettings, MultiFields.empty(), null);
        this.enabledState = enabled;
        this.defaultTTL = defaultTTL;
    }

    public boolean enabled() {
        return this.enabledState.enabled;
    }

    public long defaultTTL() {
        return this.defaultTTL;
    }

    // Other implementation for realtime get display
    public Object valueForSearch(long expirationTime) {
        return expirationTime - System.currentTimeMillis();
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
        // the ttl field is created in postParse (via super.parse) rather than preParse;
        // presumably so the ttl value resolved during document parsing is available — TODO confirm
        super.parse(context);
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException, MapperParsingException {
        if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally
            long ttl;
            if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
                // accept time-value strings like "5m"
                ttl = TimeValue.parseTimeValue(context.parser().text(), null, "ttl").millis();
            } else {
                ttl = context.parser().longValue(coerce.value());
            }
            if (ttl <= 0) {
                throw new MapperParsingException("TTL value must be > 0. Illegal value provided [" + ttl + "]");
            }
            context.sourceToParse().ttl(ttl);
        }
        return null;
    }

    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException, AlreadyExpiredException {
        if (enabledState.enabled && !context.sourceToParse().flyweight()) {
            long ttl = context.sourceToParse().ttl();
            if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value
                ttl = defaultTTL;
                context.sourceToParse().ttl(ttl);
            }
            if (ttl > 0) { // a ttl has been provided either externally or in the _source
                long timestamp = context.sourceToParse().timestamp();
                long expire = new Date(timestamp + ttl).getTime();
                long now = System.currentTimeMillis();
                // there is not point indexing already expired doc
                if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) {
                    throw new AlreadyExpiredException(context.index(), context.type(), context.id(), timestamp, ttl, now);
                }
                // the expiration timestamp (timestamp + ttl) is set as field
                fields.add(new CustomLongNumericField(this, expire, fieldType()));
            }
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        // if all are defaults, no sense to write it at all
        if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) {
            return builder;
        }
        builder.startObject(CONTENT_TYPE);
        if (includeDefaults || enabledState != Defaults.ENABLED_STATE) {
            builder.field("enabled", enabledState.enabled);
        }
        // NOTE(review): '&&' binds tighter than '||' here, so this reads as
        // includeDefaults || (defaultTTL != DEFAULT && enabled). With include_defaults=true
        // the default is emitted even when _ttl is disabled — confirm this is intended.
        if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) {
            builder.field("default", defaultTTL);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
        TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith;
        if (((TTLFieldMapper) mergeWith).enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with
            if (this.enabledState == EnabledAttributeMapper.ENABLED && ((TTLFieldMapper) mergeWith).enabledState == EnabledAttributeMapper.DISABLED) {
                // once enabled, _ttl may never be turned off again
                mergeResult.addConflict("_ttl cannot be disabled once it was enabled.");
            } else {
                if (!mergeResult.simulate()) {
                    this.enabledState = ttlMergeWith.enabledState;
                }
            }
        }
        if (ttlMergeWith.defaultTTL != -1) {
            // we never build the default when the field is disabled so we should also not set it
            // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster)
            if (!mergeResult.simulate() && (enabledState == EnabledAttributeMapper.ENABLED)) {
                this.defaultTTL = ttlMergeWith.defaultTTL;
            }
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2020_03_01;

import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.network.v2020_03_01.implementation.ExpressRouteCircuitPeeringInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2020_03_01.implementation.NetworkManager;
import com.microsoft.azure.SubResource;
import java.util.List;
import com.microsoft.azure.management.network.v2020_03_01.implementation.ExpressRouteCircuitConnectionInner;
import com.microsoft.azure.management.network.v2020_03_01.implementation.ExpressRouteCircuitStatsInner;

/**
 * Type representing ExpressRouteCircuitPeering.
 */
// NOTE: auto-generated fluent-resource interface; read-only accessors first, then the
// staged builder (Definition/DefinitionStages) for creation. Do not hand-edit — changes
// will be lost on the next AutoRest regeneration.
public interface ExpressRouteCircuitPeering extends HasInner<ExpressRouteCircuitPeeringInner>, Indexable, Refreshable<ExpressRouteCircuitPeering>, Updatable<ExpressRouteCircuitPeering.Update>, HasManager<NetworkManager> {
    /**
     * @return the azureASN value.
     */
    Integer azureASN();

    /**
     * @return the connections value.
     */
    List<ExpressRouteCircuitConnection> connections();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the expressRouteConnection value.
     */
    ExpressRouteConnectionId expressRouteConnection();

    /**
     * @return the gatewayManagerEtag value.
     */
    String gatewayManagerEtag();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the ipv6PeeringConfig value.
     */
    Ipv6ExpressRouteCircuitPeeringConfig ipv6PeeringConfig();

    /**
     * @return the lastModifiedBy value.
     */
    String lastModifiedBy();

    /**
     * @return the microsoftPeeringConfig value.
     */
    ExpressRouteCircuitPeeringConfig microsoftPeeringConfig();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the peerASN value.
     */
    Long peerASN();

    /**
     * @return the peeredConnections value.
     */
    List<PeerExpressRouteCircuitConnection> peeredConnections();

    /**
     * @return the peeringType value.
     */
    ExpressRoutePeeringType peeringType();

    /**
     * @return the primaryAzurePort value.
     */
    String primaryAzurePort();

    /**
     * @return the primaryPeerAddressPrefix value.
     */
    String primaryPeerAddressPrefix();

    /**
     * @return the provisioningState value.
     */
    ProvisioningState provisioningState();

    /**
     * @return the routeFilter value.
     */
    SubResource routeFilter();

    /**
     * @return the secondaryAzurePort value.
     */
    String secondaryAzurePort();

    /**
     * @return the secondaryPeerAddressPrefix value.
     */
    String secondaryPeerAddressPrefix();

    /**
     * @return the sharedKey value.
     */
    String sharedKey();

    /**
     * @return the state value.
     */
    ExpressRoutePeeringState state();

    /**
     * @return the stats value.
     */
    ExpressRouteCircuitStats stats();

    /**
     * @return the type value.
     */
    String type();

    /**
     * @return the vlanId value.
     */
    Integer vlanId();

    /**
     * The entirety of the ExpressRouteCircuitPeering definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithExpressRouteCircuit, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of ExpressRouteCircuitPeering definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a ExpressRouteCircuitPeering definition.
         */
        interface Blank extends WithExpressRouteCircuit {
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify ExpressRouteCircuit.
         */
        interface WithExpressRouteCircuit {
           /**
            * Specifies resourceGroupName, circuitName.
            * @param resourceGroupName The name of the resource group
            * @param circuitName The name of the express route circuit
            * @return the next definition stage
            */
            WithCreate withExistingExpressRouteCircuit(String resourceGroupName, String circuitName);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify AzureASN.
         */
        interface WithAzureASN {
            /**
             * Specifies azureASN.
             * @param azureASN The Azure ASN
             * @return the next definition stage
             */
            WithCreate withAzureASN(Integer azureASN);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify Connections.
         */
        interface WithConnections {
            /**
             * Specifies connections.
             * @param connections The list of circuit connections associated with Azure Private Peering for this circuit
             * @return the next definition stage
             */
            WithCreate withConnections(List<ExpressRouteCircuitConnectionInner> connections);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify ExpressRouteConnection.
         */
        interface WithExpressRouteConnection {
            /**
             * Specifies expressRouteConnection.
             * @param expressRouteConnection The ExpressRoute connection
             * @return the next definition stage
             */
            WithCreate withExpressRouteConnection(ExpressRouteConnectionId expressRouteConnection);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify GatewayManagerEtag.
         */
        interface WithGatewayManagerEtag {
            /**
             * Specifies gatewayManagerEtag.
             * @param gatewayManagerEtag The GatewayManager Etag
             * @return the next definition stage
             */
            WithCreate withGatewayManagerEtag(String gatewayManagerEtag);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next definition stage
             */
            WithCreate withId(String id);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify Ipv6PeeringConfig.
         */
        interface WithIpv6PeeringConfig {
            /**
             * Specifies ipv6PeeringConfig.
             * @param ipv6PeeringConfig The IPv6 peering configuration
             * @return the next definition stage
             */
            WithCreate withIpv6PeeringConfig(Ipv6ExpressRouteCircuitPeeringConfig ipv6PeeringConfig);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify MicrosoftPeeringConfig.
         */
        interface WithMicrosoftPeeringConfig {
            /**
             * Specifies microsoftPeeringConfig.
             * @param microsoftPeeringConfig The Microsoft peering configuration
             * @return the next definition stage
             */
            WithCreate withMicrosoftPeeringConfig(ExpressRouteCircuitPeeringConfig microsoftPeeringConfig);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next definition stage
             */
            WithCreate withName(String name);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify PeerASN.
         */
        interface WithPeerASN {
            /**
             * Specifies peerASN.
             * @param peerASN The peer ASN
             * @return the next definition stage
             */
            WithCreate withPeerASN(Long peerASN);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify PeeringType.
         */
        interface WithPeeringType {
            /**
             * Specifies peeringType.
             * @param peeringType The peering type. Possible values include: 'AzurePublicPeering', 'AzurePrivatePeering', 'MicrosoftPeering'
             * @return the next definition stage
             */
            WithCreate withPeeringType(ExpressRoutePeeringType peeringType);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify PrimaryAzurePort.
         */
        interface WithPrimaryAzurePort {
            /**
             * Specifies primaryAzurePort.
             * @param primaryAzurePort The primary port
             * @return the next definition stage
             */
            WithCreate withPrimaryAzurePort(String primaryAzurePort);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify PrimaryPeerAddressPrefix.
         */
        interface WithPrimaryPeerAddressPrefix {
            /**
             * Specifies primaryPeerAddressPrefix.
             * @param primaryPeerAddressPrefix The primary address prefix
             * @return the next definition stage
             */
            WithCreate withPrimaryPeerAddressPrefix(String primaryPeerAddressPrefix);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify RouteFilter.
         */
        interface WithRouteFilter {
            /**
             * Specifies routeFilter.
             * @param routeFilter The reference to the RouteFilter resource
             * @return the next definition stage
             */
            WithCreate withRouteFilter(SubResource routeFilter);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify SecondaryAzurePort.
         */
        interface WithSecondaryAzurePort {
            /**
             * Specifies secondaryAzurePort.
             * @param secondaryAzurePort The secondary port
             * @return the next definition stage
             */
            WithCreate withSecondaryAzurePort(String secondaryAzurePort);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify SecondaryPeerAddressPrefix.
         */
        interface WithSecondaryPeerAddressPrefix {
            /**
             * Specifies secondaryPeerAddressPrefix.
             * @param secondaryPeerAddressPrefix The secondary address prefix
             * @return the next definition stage
             */
            WithCreate withSecondaryPeerAddressPrefix(String secondaryPeerAddressPrefix);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify SharedKey.
         */
        interface WithSharedKey {
            /**
             * Specifies sharedKey.
             * @param sharedKey The shared key
             * @return the next definition stage
             */
            WithCreate withSharedKey(String sharedKey);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify State.
         */
        interface WithState {
            /**
             * Specifies state.
             * @param state The peering state. Possible values include: 'Disabled', 'Enabled'
             * @return the next definition stage
             */
            WithCreate withState(ExpressRoutePeeringState state);
        }

        /**
         * The stage of the expressroutecircuitpeering definition allowing to specify Stats.
         */
        interface WithStats {
            /**
             * Specifies stats.
* @param stats The peering stats of express route circuit * @return the next definition stage */ WithCreate withStats(ExpressRouteCircuitStatsInner stats); } /** * The stage of the expressroutecircuitpeering definition allowing to specify VlanId. */ interface WithVlanId { /** * Specifies vlanId. * @param vlanId The VLAN ID * @return the next definition stage */ WithCreate withVlanId(Integer vlanId); } /** * The stage of the definition which contains all the minimum required inputs for * the resource to be created (via {@link WithCreate#create()}), but also allows * for any other optional settings to be specified. */ interface WithCreate extends Creatable<ExpressRouteCircuitPeering>, DefinitionStages.WithAzureASN, DefinitionStages.WithConnections, DefinitionStages.WithExpressRouteConnection, DefinitionStages.WithGatewayManagerEtag, DefinitionStages.WithId, DefinitionStages.WithIpv6PeeringConfig, DefinitionStages.WithMicrosoftPeeringConfig, DefinitionStages.WithName, DefinitionStages.WithPeerASN, DefinitionStages.WithPeeringType, DefinitionStages.WithPrimaryAzurePort, DefinitionStages.WithPrimaryPeerAddressPrefix, DefinitionStages.WithRouteFilter, DefinitionStages.WithSecondaryAzurePort, DefinitionStages.WithSecondaryPeerAddressPrefix, DefinitionStages.WithSharedKey, DefinitionStages.WithState, DefinitionStages.WithStats, DefinitionStages.WithVlanId { } } /** * The template for a ExpressRouteCircuitPeering update operation, containing all the settings that can be modified. 
*/ interface Update extends Appliable<ExpressRouteCircuitPeering>, UpdateStages.WithAzureASN, UpdateStages.WithConnections, UpdateStages.WithExpressRouteConnection, UpdateStages.WithGatewayManagerEtag, UpdateStages.WithId, UpdateStages.WithIpv6PeeringConfig, UpdateStages.WithMicrosoftPeeringConfig, UpdateStages.WithName, UpdateStages.WithPeerASN, UpdateStages.WithPeeringType, UpdateStages.WithPrimaryAzurePort, UpdateStages.WithPrimaryPeerAddressPrefix, UpdateStages.WithRouteFilter, UpdateStages.WithSecondaryAzurePort, UpdateStages.WithSecondaryPeerAddressPrefix, UpdateStages.WithSharedKey, UpdateStages.WithState, UpdateStages.WithStats, UpdateStages.WithVlanId { } /** * Grouping of ExpressRouteCircuitPeering update stages. */ interface UpdateStages { /** * The stage of the expressroutecircuitpeering update allowing to specify AzureASN. */ interface WithAzureASN { /** * Specifies azureASN. * @param azureASN The Azure ASN * @return the next update stage */ Update withAzureASN(Integer azureASN); } /** * The stage of the expressroutecircuitpeering update allowing to specify Connections. */ interface WithConnections { /** * Specifies connections. * @param connections The list of circuit connections associated with Azure Private Peering for this circuit * @return the next update stage */ Update withConnections(List<ExpressRouteCircuitConnectionInner> connections); } /** * The stage of the expressroutecircuitpeering update allowing to specify ExpressRouteConnection. */ interface WithExpressRouteConnection { /** * Specifies expressRouteConnection. * @param expressRouteConnection The ExpressRoute connection * @return the next update stage */ Update withExpressRouteConnection(ExpressRouteConnectionId expressRouteConnection); } /** * The stage of the expressroutecircuitpeering update allowing to specify GatewayManagerEtag. */ interface WithGatewayManagerEtag { /** * Specifies gatewayManagerEtag. 
* @param gatewayManagerEtag The GatewayManager Etag * @return the next update stage */ Update withGatewayManagerEtag(String gatewayManagerEtag); } /** * The stage of the expressroutecircuitpeering update allowing to specify Id. */ interface WithId { /** * Specifies id. * @param id Resource ID * @return the next update stage */ Update withId(String id); } /** * The stage of the expressroutecircuitpeering update allowing to specify Ipv6PeeringConfig. */ interface WithIpv6PeeringConfig { /** * Specifies ipv6PeeringConfig. * @param ipv6PeeringConfig The IPv6 peering configuration * @return the next update stage */ Update withIpv6PeeringConfig(Ipv6ExpressRouteCircuitPeeringConfig ipv6PeeringConfig); } /** * The stage of the expressroutecircuitpeering update allowing to specify MicrosoftPeeringConfig. */ interface WithMicrosoftPeeringConfig { /** * Specifies microsoftPeeringConfig. * @param microsoftPeeringConfig The Microsoft peering configuration * @return the next update stage */ Update withMicrosoftPeeringConfig(ExpressRouteCircuitPeeringConfig microsoftPeeringConfig); } /** * The stage of the expressroutecircuitpeering update allowing to specify Name. */ interface WithName { /** * Specifies name. * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource * @return the next update stage */ Update withName(String name); } /** * The stage of the expressroutecircuitpeering update allowing to specify PeerASN. */ interface WithPeerASN { /** * Specifies peerASN. * @param peerASN The peer ASN * @return the next update stage */ Update withPeerASN(Long peerASN); } /** * The stage of the expressroutecircuitpeering update allowing to specify PeeringType. */ interface WithPeeringType { /** * Specifies peeringType. * @param peeringType The peering type. 
Possible values include: 'AzurePublicPeering', 'AzurePrivatePeering', 'MicrosoftPeering' * @return the next update stage */ Update withPeeringType(ExpressRoutePeeringType peeringType); } /** * The stage of the expressroutecircuitpeering update allowing to specify PrimaryAzurePort. */ interface WithPrimaryAzurePort { /** * Specifies primaryAzurePort. * @param primaryAzurePort The primary port * @return the next update stage */ Update withPrimaryAzurePort(String primaryAzurePort); } /** * The stage of the expressroutecircuitpeering update allowing to specify PrimaryPeerAddressPrefix. */ interface WithPrimaryPeerAddressPrefix { /** * Specifies primaryPeerAddressPrefix. * @param primaryPeerAddressPrefix The primary address prefix * @return the next update stage */ Update withPrimaryPeerAddressPrefix(String primaryPeerAddressPrefix); } /** * The stage of the expressroutecircuitpeering update allowing to specify RouteFilter. */ interface WithRouteFilter { /** * Specifies routeFilter. * @param routeFilter The reference to the RouteFilter resource * @return the next update stage */ Update withRouteFilter(SubResource routeFilter); } /** * The stage of the expressroutecircuitpeering update allowing to specify SecondaryAzurePort. */ interface WithSecondaryAzurePort { /** * Specifies secondaryAzurePort. * @param secondaryAzurePort The secondary port * @return the next update stage */ Update withSecondaryAzurePort(String secondaryAzurePort); } /** * The stage of the expressroutecircuitpeering update allowing to specify SecondaryPeerAddressPrefix. */ interface WithSecondaryPeerAddressPrefix { /** * Specifies secondaryPeerAddressPrefix. * @param secondaryPeerAddressPrefix The secondary address prefix * @return the next update stage */ Update withSecondaryPeerAddressPrefix(String secondaryPeerAddressPrefix); } /** * The stage of the expressroutecircuitpeering update allowing to specify SharedKey. */ interface WithSharedKey { /** * Specifies sharedKey. 
* @param sharedKey The shared key * @return the next update stage */ Update withSharedKey(String sharedKey); } /** * The stage of the expressroutecircuitpeering update allowing to specify State. */ interface WithState { /** * Specifies state. * @param state The peering state. Possible values include: 'Disabled', 'Enabled' * @return the next update stage */ Update withState(ExpressRoutePeeringState state); } /** * The stage of the expressroutecircuitpeering update allowing to specify Stats. */ interface WithStats { /** * Specifies stats. * @param stats The peering stats of express route circuit * @return the next update stage */ Update withStats(ExpressRouteCircuitStatsInner stats); } /** * The stage of the expressroutecircuitpeering update allowing to specify VlanId. */ interface WithVlanId { /** * Specifies vlanId. * @param vlanId The VLAN ID * @return the next update stage */ Update withVlanId(Integer vlanId); } } }
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.segment; import com.google.common.base.Charsets; import com.google.common.base.Throwables; import com.google.common.io.CharSource; import com.google.common.io.LineProcessor; import com.google.common.io.Resources; import io.druid.data.input.impl.DelimitedParseSpec; import io.druid.data.input.impl.DimensionSchema; import io.druid.data.input.impl.DimensionsSpec; import io.druid.data.input.impl.FloatDimensionSchema; import io.druid.data.input.impl.LongDimensionSchema; import io.druid.data.input.impl.StringDimensionSchema; import io.druid.data.input.impl.StringInputRowParser; import io.druid.data.input.impl.TimestampSpec; import io.druid.hll.HyperLogLogHash; import io.druid.java.util.common.logger.Logger; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.DoubleMaxAggregatorFactory; import io.druid.query.aggregation.DoubleMinAggregatorFactory; import io.druid.query.aggregation.DoubleSumAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import io.druid.query.aggregation.hyperloglog.HyperUniquesSerde; import io.druid.query.expression.TestExprMacroTable; import io.druid.segment.column.ValueType; import 
io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.serde.ComplexMetrics;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import org.joda.time.DateTime;
import org.joda.time.Interval;

import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Shared test fixture that lazily builds and caches several flavors of the standard
 * "druid.sample.numeric.tsv" test index: an in-memory incremental index (with and without
 * rollup), memory-mapped persisted versions of both, and a merged index built from the
 * ".top"/".bottom" halves of the sample data.
 *
 * <p>All caches are guarded by a single lock (the {@link #log} object, kept for
 * compatibility with the original code). The lock is held across the entire
 * check-build-assign sequence so concurrent callers cannot both build an index or
 * observe a partially published one.
 */
public class TestIndex
{
  public static final String[] COLUMNS = new String[]{
      "ts",
      "market",
      "quality",
      "qualityLong",
      "qualityFloat",
      "qualityNumericString",
      "placement",
      "placementish",
      "index",
      "partial_null_column",
      "null_column",
      "quality_uniques",
      "indexMin",
      "indexMaxPlusTen"
  };
  public static final String[] DIMENSIONS = new String[]{
      "market",
      "quality",
      "qualityLong",
      "qualityFloat",
      "qualityNumericString",
      "placement",
      "placementish",
      "partial_null_column",
      "null_column"
  };
  public static final List<DimensionSchema> DIMENSION_SCHEMAS = Arrays.asList(
      new StringDimensionSchema("market"),
      new StringDimensionSchema("quality"),
      new LongDimensionSchema("qualityLong"),
      new FloatDimensionSchema("qualityFloat"),
      new StringDimensionSchema("qualityNumericString"),
      new StringDimensionSchema("placement"),
      new StringDimensionSchema("placementish"),
      new StringDimensionSchema("partial_null_column"),
      new StringDimensionSchema("null_column")
  );
  public static final DimensionsSpec DIMENSIONS_SPEC = new DimensionsSpec(
      DIMENSION_SCHEMAS,
      null,
      null
  );
  public static final String[] METRICS = new String[]{"index", "indexMin", "indexMaxPlusTen"};

  private static final Logger log = new Logger(TestIndex.class);
  private static final Interval DATA_INTERVAL = new Interval("2011-01-12T00:00:00.000Z/2011-05-01T00:00:00.000Z");

  // Virtual column "expr" = index + 10; the DoubleMax aggregator below reads it.
  private static final VirtualColumns VIRTUAL_COLUMNS = VirtualColumns.create(
      Collections.<VirtualColumn>singletonList(
          new ExpressionVirtualColumn("expr", "index + 10", ValueType.FLOAT, TestExprMacroTable.INSTANCE)
      )
  );
  public static final AggregatorFactory[] METRIC_AGGS = new AggregatorFactory[]{
      new DoubleSumAggregatorFactory(METRICS[0], METRICS[0]),
      new DoubleMinAggregatorFactory(METRICS[1], METRICS[0]),
      new DoubleMaxAggregatorFactory(METRICS[2], VIRTUAL_COLUMNS.getVirtualColumns()[0].getOutputName()),
      new HyperUniquesAggregatorFactory("quality_uniques", "quality")
  };
  private static final IndexSpec indexSpec = new IndexSpec();

  private static final IndexMerger INDEX_MERGER = TestHelper.getTestIndexMerger();
  private static final IndexIO INDEX_IO = TestHelper.getTestIndexIO();

  static {
    // Register the hyperUnique complex-metric serde once per JVM; other test fixtures
    // may have registered it already, hence the null check.
    if (ComplexMetrics.getSerdeForType("hyperUnique") == null) {
      ComplexMetrics.registerSerde("hyperUnique", new HyperUniquesSerde(HyperLogLogHash.getDefault()));
    }
  }

  // Lazily built caches; all reads and writes happen under synchronized (log).
  private static IncrementalIndex realtimeIndex = null;
  private static IncrementalIndex noRollupRealtimeIndex = null;
  private static QueryableIndex mmappedIndex = null;
  private static QueryableIndex noRollupMmappedIndex = null;
  private static QueryableIndex mergedRealtime = null;

  /**
   * Returns the cached rollup incremental index, building it on first use.
   *
   * <p>Fix: the original released the lock between the null check and the
   * build-and-assign, so two threads could both build the index and publish it
   * without synchronization. The lock is now held for the whole sequence.
   */
  public static IncrementalIndex getIncrementalTestIndex()
  {
    synchronized (log) {
      if (realtimeIndex == null) {
        realtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv");
      }
      return realtimeIndex;
    }
  }

  /** Returns the cached no-rollup incremental index, building it on first use. */
  public static IncrementalIndex getNoRollupIncrementalTestIndex()
  {
    synchronized (log) {
      if (noRollupRealtimeIndex == null) {
        noRollupRealtimeIndex = makeRealtimeIndex("druid.sample.numeric.tsv", false);
      }
      return noRollupRealtimeIndex;
    }
  }

  /**
   * Returns the cached memory-mapped (persisted) rollup index, building it on first use.
   * The nested getIncrementalTestIndex() call re-acquires the same reentrant lock.
   */
  public static QueryableIndex getMMappedTestIndex()
  {
    synchronized (log) {
      if (mmappedIndex == null) {
        IncrementalIndex incrementalIndex = getIncrementalTestIndex();
        mmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex);
      }
      return mmappedIndex;
    }
  }

  /** Returns the cached memory-mapped no-rollup index, building it on first use. */
  public static QueryableIndex getNoRollupMMappedTestIndex()
  {
    synchronized (log) {
      if (noRollupMmappedIndex == null) {
        IncrementalIndex incrementalIndex = getNoRollupIncrementalTestIndex();
        noRollupMmappedIndex = persistRealtimeAndLoadMMapped(incrementalIndex);
      }
      return noRollupMmappedIndex;
    }
  }

  /**
   * Returns a queryable index produced by persisting the ".top" and ".bottom" halves of the
   * sample data separately and merging them; built on first use and cached.
   */
  public static QueryableIndex mergedRealtimeIndex()
  {
    synchronized (log) {
      if (mergedRealtime != null) {
        return mergedRealtime;
      }

      try {
        IncrementalIndex top = makeRealtimeIndex("druid.sample.numeric.tsv.top");
        IncrementalIndex bottom = makeRealtimeIndex("druid.sample.numeric.tsv.bottom");

        // Turn the temp *file* into a scratch *directory* holding the three segment dirs.
        File tmpFile = File.createTempFile("yay", "who");
        tmpFile.delete();

        File topFile = new File(tmpFile, "top");
        File bottomFile = new File(tmpFile, "bottom");
        File mergedFile = new File(tmpFile, "merged");

        topFile.mkdirs();
        topFile.deleteOnExit();
        bottomFile.mkdirs();
        bottomFile.deleteOnExit();
        mergedFile.mkdirs();
        mergedFile.deleteOnExit();

        INDEX_MERGER.persist(top, DATA_INTERVAL, topFile, indexSpec);
        INDEX_MERGER.persist(bottom, DATA_INTERVAL, bottomFile, indexSpec);

        mergedRealtime = INDEX_IO.loadIndex(
            INDEX_MERGER.mergeQueryableIndex(
                Arrays.asList(INDEX_IO.loadIndex(topFile), INDEX_IO.loadIndex(bottomFile)),
                true,
                METRIC_AGGS,
                mergedFile,
                indexSpec
            )
        );

        return mergedRealtime;
      }
      catch (IOException e) {
        throw Throwables.propagate(e);
      }
    }
  }

  /** Builds an uncached rollup incremental index from the named classpath resource. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename)
  {
    return makeRealtimeIndex(resourceFilename, true);
  }

  /** Builds an uncached incremental index from the named classpath resource. */
  public static IncrementalIndex makeRealtimeIndex(final String resourceFilename, boolean rollup)
  {
    final URL resource = TestIndex.class.getClassLoader().getResource(resourceFilename);
    if (resource == null) {
      throw new IllegalArgumentException("cannot find resource " + resourceFilename);
    }
    log.info("Realtime loading index file[%s]", resource);
    CharSource stream = Resources.asByteSource(resource).asCharSource(Charsets.UTF_8);
    return makeRealtimeIndex(stream, rollup);
  }

  /** Builds an uncached rollup incremental index from the given character source. */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source)
  {
    return makeRealtimeIndex(source, true);
  }

  /**
   * Builds an incremental index from the given character source. On failure the
   * corresponding static cache slot is cleared so a later call can retry.
   */
  public static IncrementalIndex makeRealtimeIndex(final CharSource source, boolean rollup)
  {
    final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
        .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
        .withTimestampSpec(new TimestampSpec("ds", "auto", null))
        .withDimensionsSpec(DIMENSIONS_SPEC)
        .withVirtualColumns(VIRTUAL_COLUMNS)
        .withMetrics(METRIC_AGGS)
        .withRollup(rollup)
        .build();
    final IncrementalIndex retVal = new IncrementalIndex.Builder()
        .setIndexSchema(schema)
        .setMaxRowCount(10000)
        .buildOnheap();

    try {
      return loadIncrementalIndex(retVal, source);
    }
    catch (Exception e) {
      if (rollup) {
        realtimeIndex = null;
      } else {
        noRollupRealtimeIndex = null;
      }
      throw Throwables.propagate(e);
    }
  }

  /** Loads TSV rows into {@code retVal} using the standard test parser (tab-delimited, "ts" iso timestamp). */
  public static IncrementalIndex loadIncrementalIndex(
      final IncrementalIndex retVal,
      final CharSource source
  ) throws IOException
  {
    final StringInputRowParser parser = new StringInputRowParser(
        new DelimitedParseSpec(
            new TimestampSpec("ts", "iso", null),
            new DimensionsSpec(DIMENSION_SCHEMAS, null, null),
            "\t",
            "\u0001",
            Arrays.asList(COLUMNS),
            false,
            0
        )
        , "utf8"
    );
    return loadIncrementalIndex(retVal, source, parser);
  }

  /** Loads every line of {@code source} into {@code retVal} via {@code parser}, logging row count and elapsed time. */
  public static IncrementalIndex loadIncrementalIndex(
      final IncrementalIndex retVal,
      final CharSource source,
      final StringInputRowParser parser
  ) throws IOException
  {
    final AtomicLong startTime = new AtomicLong();
    int lineCount = source.readLines(
        new LineProcessor<Integer>()
        {
          boolean runOnce = false;
          int lineCount = 0;

          @Override
          public boolean processLine(String line) throws IOException
          {
            // Start the clock on the first line so setup cost isn't counted.
            if (!runOnce) {
              startTime.set(System.currentTimeMillis());
              runOnce = true;
            }
            retVal.add(parser.parse(line));
            ++lineCount;
            return true;
          }

          @Override
          public Integer getResult()
          {
            return lineCount;
          }
        }
    );
    log.info("Loaded %,d lines in %,d millis.", lineCount, System.currentTimeMillis() - startTime.get());

    return retVal;
  }

  /** Persists {@code index} to a scratch directory and loads it back as a memory-mapped index. */
  public static QueryableIndex persistRealtimeAndLoadMMapped(IncrementalIndex index)
  {
    try {
      File someTmpFile = File.createTempFile("billy", "yay");
      someTmpFile.delete();
      someTmpFile.mkdirs();
      someTmpFile.deleteOnExit();

      INDEX_MERGER.persist(index, someTmpFile, indexSpec);
      return INDEX_IO.loadIndex(someTmpFile);
    }
    catch (IOException e) {
      throw Throwables.propagate(e);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.phoenix.end2end;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.util.ArrayList;

import org.junit.Test;

/**
 * End-to-end tests for the NTH_VALUE(expr, n) WITHIN GROUP (ORDER BY ...) aggregate:
 * basic usage, several NTH_VALUE calls in one query, ASC/DESC ordering, nested
 * (sub-select) aggregation, and interaction with a DESC-sorted primary-key column.
 *
 * NOTE(review): JUnit's Assert.assertEquals signature is (expected, actual); most calls
 * below pass (actual, expected), which inverts the message on failure (the tests still
 * pass/fail correctly). NOTE(review): the JDBC Connections opened here are never closed;
 * consider try-with-resources.
 */
public class NthValueFunctionIT extends ParallelStatsDisabledIT {

    /** NTH_VALUE(val, 2) ordered by dates DESC picks the 2nd-latest row's val (dates=3 -> val=4). */
    @Test
    public void simpleTest() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nthValue = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " "
                + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                + " dates INTEGER, val INTEGER)";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 1, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 2, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 3, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 4, 2)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(val, 2) WITHIN GROUP (ORDER BY dates DESC) FROM " + nthValue
                + " GROUP BY page_id");

        assertTrue(rs.next());
        // NOTE(review): arguments swapped vs. assertEquals(expected, actual).
        assertEquals(rs.getInt(1), 4);
        assertFalse(rs.next());
    }

    /**
     * Three NTH_VALUE offsets (1st/2nd/3rd by lrd DESC) evaluated per feid group in a single
     * query; only id=2 rows qualify for feid 8 and 9, and feid 10 mixes id 2 and 3 rows
     * (the WHERE id=2 filter keeps only the id=2 ones).
     */
    @Test
    public void multipleNthValueFunctionTest() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nthValue = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " "
                + "(id INTEGER NOT NULL, feid UNSIGNED_LONG NOT NULL,"
                + " uid INTEGER NOT NULL, lrd INTEGER"
                + " CONSTRAINT PKVIEW PRIMARY KEY ( id, feid, uid))";
        conn.createStatement().execute(ddl);

        // feid = 8 group (id = 2).
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 8, 1, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 8, 2, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 8, 3, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 8, 4, 2)");
        // feid = 9 group (id = 2).
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 9, 5, 1)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 9, 6, 3)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 9, 8, 5)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 9, 7, 8)");
        // feid = 10 group (id = 2).
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 10, 5, 1)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 10, 6, 3)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 10, 7, 5)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (2, 10, 8, 8)");
        // id = 3 rows: excluded by the WHERE id=2 filter below.
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (3, 10, 5, 1)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (3, 10, 6, 3)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (3, 10, 7, 5)");
        conn.createStatement().execute(
            "UPSERT INTO " + nthValue + " (id, feid, uid, lrd) VALUES (3, 10, 8, 8)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(uid, 1) WITHIN GROUP (ORDER BY lrd DESC) as nth1_user_id, NTH_VALUE(uid, 2) WITHIN GROUP (ORDER BY lrd DESC) as nth2_user_id, NTH_VALUE(uid, 3) WITHIN GROUP (ORDER BY lrd DESC) as nth3_user_id FROM " + nthValue
                + " where id=2 and feid in (8, 9, 10) GROUP BY feid");

        // One result row per feid group (8, 9, 10), in that order.
        assertTrue(rs.next());
        assertEquals(rs.getInt(1), 2);
        assertEquals(rs.getInt(2), 1);
        assertEquals(rs.getInt(3), 3);
        assertTrue(rs.next());
        assertEquals(rs.getInt(1), 7);
        assertEquals(rs.getInt(2), 8);
        assertEquals(rs.getInt(3), 6);
        assertTrue(rs.next());
        assertEquals(rs.getInt(1), 8);
        assertEquals(rs.getInt(2), 7);
        assertEquals(rs.getInt(3), 6);
        assertFalse(rs.next());
    }

    /** Ascending order: the 2nd value by "DATE" ASC is the "DATE"=1 row ("value"=7). */
    @Test
    public void offsetValueAscOrder() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nth_test_table = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " "
                + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                + " \"DATE\" INTEGER, \"value\" UNSIGNED_LONG)";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (1, 8, 0, 300)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (2, 8, 1, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (3, 8, 2, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (4, 8, 3, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (5, 8, 4, 2)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (6, 8, 5, 150)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(\"value\", 2)  WITHIN GROUP (ORDER BY \"DATE\" ASC) FROM " + nth_test_table
                + " GROUP BY page_id");

        assertTrue(rs.next());
        assertEquals(rs.getLong(1), 7);
        assertFalse(rs.next());
    }

    /** Descending order over the same data: the 2nd value by "DATE" DESC is the "DATE"=4 row ("value"=2). */
    @Test
    public void offsetValueDescOrder() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nth_test_table = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " "
                + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                + " \"DATE\" INTEGER, \"value\" UNSIGNED_LONG)";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (1, 8, 0, 300)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (2, 8, 1, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (3, 8, 2, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (4, 8, 3, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (5, 8, 4, 2)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (6, 8, 5, 150)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(\"value\", 2)  WITHIN GROUP (ORDER BY \"DATE\" DESC) FROM " + nth_test_table
                + " GROUP BY page_id");

        assertTrue(rs.next());
        assertEquals(rs.getLong(1), 2);
        assertFalse(rs.next());
    }

    /**
     * NTH_VALUE applied on top of a sub-query aggregation: per-page SUM("value") ordered by
     * MIN("DATE"); the 2nd group by MIN_DATE is page 9 (sum 9 + 4 = 13).
     */
    @Test
    public void offsetValueSubAggregation() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nth_test_table = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " "
                + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                + " \"DATE\" INTEGER, \"value\" UNSIGNED_LONG)";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (1, 8, 0, 300)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (2, 8, 1, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (3, 9, 2, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (4, 9, 3, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (5, 10, 4, 2)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, \"DATE\", \"value\") VALUES (6, 10, 5, 150)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(SUM_VALUE, 2) WITHIN GROUP (ORDER BY MIN_DATE ASC) FROM ("
                + "SELECT MIN(\"DATE\") AS MIN_DATE, SUM(\"value\") AS SUM_VALUE FROM " + nth_test_table
                + " GROUP BY page_id) x");

        assertTrue(rs.next());
        // Correct (expected, actual) order here, unlike the other assertions in this class.
        assertEquals(13, rs.getLong(1));
        assertFalse(rs.next());
    }

    /**
     * Rows are inserted out of "DATE" order; ordering must come from the ORDER BY clause,
     * not insertion/PK order: 2nd by "DATE" DESC is the "DATE"=4 row ("value"=4).
     */
    @Test
    public void offsetValueLastMismatchByColumn() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nth_test_table = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " "
                + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG,"
                + " \"DATE\" INTEGER, \"value\" UNSIGNED_LONG)";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (1, 8, 5, 8)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (2, 8, 2, 7)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (3, 8, 1, 9)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (4, 8, 4, 4)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (5, 8, 3, 2)");
        conn.createStatement().execute(
            "UPSERT INTO " + nth_test_table + " (id, page_id, \"DATE\", \"value\") VALUES (6, 8, 0, 1)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(\"value\", 2)  WITHIN GROUP (ORDER BY \"DATE\" DESC) FROM " + nth_test_table
                + " GROUP BY page_id");

        assertTrue(rs.next());
        assertEquals(rs.getLong(1), 4);
        assertFalse(rs.next());
    }

    /**
     * "value" participates in the primary key with DESC sort order; NTH_VALUE must still
     * honor the explicit ORDER BY dates ASC rather than the stored key order.
     */
    @Test
    public void testSortOrderInDataColWithOffset() throws Exception {
        // NOTE(review): connection is never closed.
        Connection conn = DriverManager.getConnection(getUrl());

        String nth_test_table = generateUniqueName();
        String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " "
                + "(id INTEGER NOT NULL, page_id UNSIGNED_LONG,"
                + " dates BIGINT NOT NULL, \"value\" BIGINT NOT NULL CONSTRAINT pk PRIMARY KEY (id, dates, \"value\" DESC))";
        conn.createStatement().execute(ddl);

        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, dates, \"value\") VALUES (1, 8, 1, 3)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, dates, \"value\") VALUES (2, 8, 2, 7)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, dates, \"value\") VALUES (3, 8, 3, 9)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, dates, \"value\") VALUES (5, 8, 5, 158)");
        conn.createStatement().execute("UPSERT INTO " + nth_test_table
            + " (id, page_id, dates, \"value\") VALUES (4, 8, 4, 5)");
        conn.commit();

        ResultSet rs = conn.createStatement().executeQuery(
            "SELECT NTH_VALUE(\"value\", 2)  WITHIN GROUP (ORDER BY dates ASC) FROM " + nth_test_table
                + " GROUP BY page_id");
assertTrue(rs.next()); assertEquals(rs.getLong(1), 7); assertFalse(rs.next()); } @Test public void nonUniqueValuesInOrderByAsc() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nthValue = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " " + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG," + " dates INTEGER, val INTEGER)"; conn.createStatement().execute(ddl); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 1, 7)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 2, 9)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 2, 4)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 2, 2)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (6, 8, 3, 3)"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery( "SELECT NTH_VALUE(val, 3) WITHIN GROUP (ORDER BY dates ASC) FROM " + nthValue + " GROUP BY page_id"); assertTrue(rs.next()); assertInIntArray(new int[]{2, 4, 9}, rs.getInt(1)); assertFalse(rs.next()); } @Test public void nonUniqueValuesInOrderByAscSkipDuplicit() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nthValue = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " " + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG," + " dates INTEGER, val INTEGER)"; conn.createStatement().execute(ddl); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 1, 7)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 2, 9)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 2, 4)"); conn.createStatement().execute( 
"UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 2, 2)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (6, 8, 3, 3)"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery( "SELECT NTH_VALUE(val, 5) WITHIN GROUP (ORDER BY dates ASC) FROM " + nthValue + " GROUP BY page_id"); assertTrue(rs.next()); assertEquals(3, rs.getInt(1)); assertFalse(rs.next()); } @Test public void nonUniqueValuesInOrderByDesc() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nthValue = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " " + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG," + " dates INTEGER, val INTEGER)"; conn.createStatement().execute(ddl); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 1, 7)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 2, 9)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 2, 4)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 2, 2)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (6, 8, 3, 3)"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery( "SELECT NTH_VALUE(val, 3) WITHIN GROUP (ORDER BY dates DESC) FROM " + nthValue + " GROUP BY page_id"); assertTrue(rs.next()); assertInIntArray(new int[]{2, 4, 9}, rs.getInt(1)); assertFalse(rs.next()); } @Test public void nonUniqueValuesInOrderNextValueDesc() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nthValue = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " " + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG," + " dates INTEGER, val INTEGER)"; conn.createStatement().execute(ddl); 
conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 0, 7)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 1, 9)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 2, 4)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 2, 2)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (6, 8, 3, 3)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (7, 8, 3, 5)"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery( "SELECT NTH_VALUE(val, 2) WITHIN GROUP (ORDER BY dates DESC) FROM " + nthValue + " GROUP BY page_id"); assertTrue(rs.next()); assertInIntArray(new int[]{3, 5}, rs.getInt(1)); assertFalse(rs.next()); } @Test public void nonUniqueValuesInOrderNextValueAsc() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nthValue = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nthValue + " " + "(id INTEGER NOT NULL PRIMARY KEY, page_id UNSIGNED_LONG," + " dates INTEGER, val INTEGER)"; conn.createStatement().execute(ddl); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (2, 8, 0, 7)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (3, 8, 1, 9)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (4, 8, 2, 4)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (5, 8, 2, 2)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (6, 8, 3, 3)"); conn.createStatement().execute( "UPSERT INTO " + nthValue + " (id, page_id, dates, val) VALUES (7, 8, 3, 5)"); conn.commit(); ResultSet rs = 
conn.createStatement().executeQuery( "SELECT NTH_VALUE(val, 5) WITHIN GROUP (ORDER BY dates ASC) FROM " + nthValue + " GROUP BY page_id"); assertTrue(rs.next()); assertInIntArray(new int[]{3, 5}, rs.getInt(1)); assertFalse(rs.next()); } @Test public void ignoreNullValues() throws Exception { Connection conn = DriverManager.getConnection(getUrl()); String nth_test_table = generateUniqueName(); String ddl = "CREATE TABLE IF NOT EXISTS " + nth_test_table + " " + "(id INTEGER NOT NULL, page_id UNSIGNED_LONG," + " dates BIGINT NOT NULL, \"value\" BIGINT NULL CONSTRAINT pk PRIMARY KEY (id, dates))"; conn.createStatement().execute(ddl); conn.createStatement().execute("UPSERT INTO " + nth_test_table + " (id, page_id, dates, \"value\") VALUES (1, 8, 1, 1)"); conn.createStatement().execute("UPSERT INTO " + nth_test_table + " (id, page_id, dates, \"value\") VALUES (2, 8, 2, NULL)"); conn.createStatement().execute("UPSERT INTO " + nth_test_table + " (id, page_id, dates, \"value\") VALUES (3, 8, 3, NULL)"); conn.createStatement().execute("UPSERT INTO " + nth_test_table + " (id, page_id, dates, \"value\") VALUES (5, 8, 4, 4)"); conn.createStatement().execute("UPSERT INTO " + nth_test_table + " (id, page_id, dates, \"value\") VALUES (4, 8, 5, 5)"); conn.commit(); ResultSet rs = conn.createStatement().executeQuery( "SELECT NTH_VALUE(\"value\", 2) WITHIN GROUP (ORDER BY dates DESC) FROM " + nth_test_table + " GROUP BY page_id"); assertTrue(rs.next()); assertEquals(rs.getLong(1), 4); assertFalse(rs.next()); } private void assertInIntArray(int[] should, int actualValue) { ArrayList<Integer> shouldList = new ArrayList<Integer>(); for (int i: should) { shouldList.add(i); } assertTrue(shouldList.contains(actualValue)); } }
/* RmiUtilities.java -- Copyright (C) 2005 Free Software Foundation, Inc. This file is part of GNU Classpath. GNU Classpath is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. GNU Classpath is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GNU Classpath; see the file COPYING. If not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. 
*/

package gnu.javax.rmi.CORBA;

import gnu.CORBA.OrbFunctional;
import gnu.CORBA.Minor;
import gnu.CORBA.Unexpected;
import gnu.CORBA.CDR.Vio;
import gnu.CORBA.CDR.gnuRuntime;
import gnu.CORBA.CDR.gnuValueStream;
import gnu.CORBA.CDR.HeadlessInput;

import org.omg.CORBA.MARSHAL;
import org.omg.CORBA.StringValueHelper;
import org.omg.CORBA.WStringValueHelper;
import org.omg.CORBA.portable.Delegate;
import org.omg.CORBA.portable.InputStream;
import org.omg.CORBA.portable.ObjectImpl;
import org.omg.CORBA.portable.OutputStream;
import org.omg.CORBA.portable.ValueBase;
import org.omg.PortableServer.POA;
import org.omg.PortableServer.POAHelper;
import org.omg.PortableServer.Servant;
import org.omg.PortableServer.POAManagerPackage.State;
import org.omg.SendingContext.RunTime;

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.rmi.Remote;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.TreeSet;
import java.util.WeakHashMap;

import javax.rmi.PortableRemoteObject;
import javax.rmi.CORBA.Stub;
import javax.rmi.CORBA.Tie;
import javax.rmi.CORBA.Util;

/**
 * Defines methods that must be accessible in several derived classes.
 *
 * Implements the RMI-IIOP value-type wire format: writing/reading the fields
 * of Serializable objects, exporting Remote implementations as Stubs via a
 * Tie, and computing the OMG persistent hash code for repository ids.
 *
 * @author Audrius Meskauskas, Lithuania (AudriusA@Bioinformatics.org)
 */
public class RmiUtilities
{
  /**
   * The currently used RMI-IIOP version format.
   */
  public static byte VERSION = 1;

  /**
   * The non - writable class fields (static and transient fields are never
   * marshalled).
   */
  static final int NON_WRITABLE = Modifier.STATIC | Modifier.TRANSIENT;

  /**
   * The standard String repository Id.
   */
  public static final String RMI_STRING_ID = StringValueHelper.id();

  /**
   * The standard Class repository Id.
   */
  public static final String RMI_CLASS_ID =
    "RMI:javax.rmi.CORBA.ClassDesc:2BABDA04587ADCCC:CFBF02CF5294176B";

  /**
   * The standard string array repository Id.
   */
  public static final String RMI_STRING_ARRAY_ID =
    "RMI:[Ljava.lang.String;:071DA8BE7F971128:A0F0A4387A3BB342";

  /**
   * An instance of the wide string value helper for writing strings.
   */
  static WStringValueHelper wStringValueHelper = new WStringValueHelper();

  /**
   * Set of serializable classes that have .writeObject and .readObject defined.
   * Contains weak references to ensure that the classes will be unloadable.
   * Maps Class -&gt; one of STANDARD / CUSTOM_DWO / CUSTOM_NO_DWO; all access
   * is synchronized on this map.
   */
  WeakHashMap io_format = new WeakHashMap();

  /**
   * The standard IO format with no .writeObject and .readObject defined.
   */
  static final Object STANDARD = new Object();

  /**
   * The custom IO format with .writeObject and .readObject defined,
   * defaultWriteObject called.
   */
  static final Object CUSTOM_DWO = new Object();

  /**
   * The custom IO format with .writeObject and .readObject defined,
   * defaultWriteObject has not been called.
   */
  static final Object CUSTOM_NO_DWO = new Object();

  /**
   * The arguments for readObject.
   */
  static final Class[] READ_OBJECT_ARGS = new Class[] { ObjectInputStream.class };

  /**
   * The arguments for writeObject.
   */
  static final Class[] WRITE_OBJECT_ARGS = new Class[] { ObjectOutputStream.class };

  /**
   * The undocumented field that is heading the Sun's object data, written with
   * writeObject.
   */
  // NOTE(review): magic constant; meaning not derivable from this file.
  static final int S_X = 16908034;

  /**
   * Write all fields of the passed value.
   *
   * Primitives are written with the matching CDR primitive call; String uses
   * the wide string helper; everything else is delegated to writeMember.
   * Any failure is wrapped into MARSHAL with minor code ValueFields.
   */
  void writeFields(OutputStream an_output, Serializable object)
  {
    org.omg.CORBA_2_3.portable.OutputStream output =
      (org.omg.CORBA_2_3.portable.OutputStream) an_output;
    try
      {
        Class o_class = object.getClass();
        // Fields come back sorted by name and already accessible.
        Field[] fields = getWritableFields(o_class);
        Field f;
        Class fc;
        for (int i = 0; i < fields.length; i++)
          {
            f = fields[i];
            fc = f.getType();
            Object v = f.get(object);

            if (fc == String.class)
              {
                output.write_value((Serializable) v, wStringValueHelper);
              }
            else if (fc == int.class)
              output.write_long(((Integer) v).intValue());
            else if (fc == long.class)
              output.write_longlong(((Number) v).longValue());
            else if (fc == double.class)
              output.write_double(((Number) v).doubleValue());
            else if (fc == float.class)
              output.write_float(((Number) v).floatValue());
            else if (fc == boolean.class)
              output.write_boolean(((Boolean) v).booleanValue());
            else if (fc == short.class)
              output.write_short(((Number) v).shortValue());
            else if (fc == byte.class)
              output.write_octet(((Number) v).byteValue());
            else if (fc == char.class)
              output.write_wchar(((Character) v).charValue());
            else
              {
                if (!fc.isInterface() && Remote.class.isAssignableFrom(fc))
                  // NOTE(review): getExportedInterface takes an *instance* and
                  // inspects object.getClass(); passing the Class fc here makes
                  // it inspect Class.class instead. Looks suspicious — confirm
                  // against the intended overload before relying on this path.
                  fc = getExportedInterface(fc);
                writeMember(output, v, fc);
              }
          }
      }
    catch (Exception ex)
      {
        MARSHAL m = new MARSHAL("Cannot write " + object);
        m.minor = Minor.ValueFields;
        m.initCause(ex);
        throw m;
      }
  }

  /**
   * Write a memeber (field) of the data structure.
   *
   * Remotes/CORBA objects are written as references (exporting a Tie when
   * needed); plain Serializables are written as value types. Non-serializable
   * members raise MARSHAL with minor NonSerializable.
   */
  void writeMember(org.omg.CORBA_2_3.portable.OutputStream output,
                   Object object, Class xClass)
  {
    if (output instanceof gnuValueStream)
      {
        gnuRuntime g = ((gnuValueStream) output).getRunTime();
        // Reset the target as we are already beyond the critical point
        // where is must have the value being written.
        if (g != null)
          g.target = null;
      }

    if (Serializable.class.isAssignableFrom(xClass)
        || Remote.class.isAssignableFrom(xClass))
      {
        // Object handles null reference on its own.
        if (org.omg.CORBA.Object.class.isAssignableFrom(xClass)
            || Remote.class.isAssignableFrom(xClass))
          {
            if (object == null)
              output.write_Object(null);
            else if (isTieRequired(object))
              exportTie(output, object, xClass);
            else
              writeValue(output, (Serializable) object);
          }
        else
          output.write_value((Serializable) object, xClass);
      }
    else
      {
        MARSHAL m = new MARSHAL(xClass + " is not Serializable");
        m.minor = Minor.NonSerializable;
        throw m;
      }
  }

  /**
   * Check if the object must be wrapped into Tie, connected to the ORB and then
   * the corresponding Stub be written.
   */
  public boolean isTieRequired(Object object)
  {
    return object instanceof Remote && !(object instanceof Stub);
  }

  /**
   * Get the interface under that the class of this object must be exposed. The
   * interface must be derived from Remote.
   *
   * Throws MARSHAL (minor TargetConversion) if the object implements zero or
   * more than one Remote-derived interface — the export target would be
   * ambiguous.
   */
  Class getExportedInterface(Object object)
    throws MARSHAL
  {
    Class fc = null;
    Class[] interfaces = object.getClass().getInterfaces();
    for (int i = 0; i < interfaces.length; i++)
      {
        // Remote itself does not count, only interfaces derived from it.
        if (!Remote.class.equals(interfaces[i]))
          if (Remote.class.isAssignableFrom(interfaces[i]))
            {
              if (fc == null)
                fc = interfaces[i];
              else
                {
                  MARSHAL m = new MARSHAL("Both " + fc + " and "
                                          + interfaces[i] + " extends Remote");
                  m.minor = Minor.TargetConversion;
                  throw m;
                }
            }
      }
    if (fc == null)
      {
        MARSHAL m = new MARSHAL(object.getClass()
          + " does not implement any interface, derived from Remote");
        m.minor = Minor.TargetConversion;
        throw m;
      }
    return fc;
  }

  /**
   * Get the persistent hash code for the given class, as defined by OMG
   * standard. The inheritance, field names and types (but not the visibility)
   * are taken into consideration as well as the presence of the writeObject
   * method are taken into consideration. The class name and methods, if any,
   * are not taken into consideration.
   *
   * Returns 0 for non-serializable classes and for arrays of primitives,
   * Remotes or non-serializable components. The hash is the first 8 bytes of
   * the SHA digest of the serialized (superclass hash, writeObject marker,
   * sorted field names + descriptors) stream, little-endian.
   */
  public static long getHashCode(Class c)
  {
    Class of = c.isArray() ? c.getComponentType() : null;
    if (c.isArray()
        && ((!Serializable.class.isAssignableFrom(of) || of.isPrimitive()
             || Remote.class.isAssignableFrom(of))))
      return 0;
    if (!Serializable.class.isAssignableFrom(c))
      return 0;
    try
      {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bout);

        // The superclass hash is folded in first, making the code
        // inheritance-sensitive.
        Class superClass = c.getSuperclass();
        if (superClass != null)
          out.writeLong(getHashCode(superClass));

        int writeObjectPresentCode;
        try
          {
            c.getDeclaredMethod("writeObject",
                                new Class[] { ObjectOutputStream.class });
            writeObjectPresentCode = 2; // Exists.
          }
        catch (NoSuchMethodException e)
          {
            writeObjectPresentCode = 1; // Missing.
          }
        out.writeInt(writeObjectPresentCode);

        Field[] fields = c.getDeclaredFields();
        // Canonical order: sort declared fields by name.
        Arrays.sort(fields, new Comparator()
          {
            public int compare(Object a, Object b)
            {
              Field fa = (Field) a;
              Field fb = (Field) b;
              return fa.getName().compareTo(fb.getName());
            }
          });

        Field f;
        for (int i = 0; i < fields.length; i++)
          {
            f = fields[i];
            if ((f.getModifiers() & NON_WRITABLE) == 0)
              {
                out.writeUTF(f.getName());
                out.writeUTF(getDescriptor(f.getType()));
              }
          }

        out.flush();
        out.close();

        MessageDigest shaDigest;
        try
          {
            shaDigest = MessageDigest.getInstance("SHA");
          }
        catch (Exception ex)
          {
            // NOTE(review): the triggering exception is dropped here; the
            // InternalError carries no cause.
            throw new InternalError("SHA digesting algorithm is not available");
          }

        // Return the digest value to the calling
        // method as an array of bytes.
        byte[] sha = shaDigest.digest(bout.toByteArray());

        // Fold the first 8 digest bytes into a long, little-endian.
        long hash = 0;
        for (int i = 0; i < Math.min(8, sha.length); i++)
          {
            hash += (long) (sha[i] & 255) << (i * 8);
          }
        return hash;
      }
    catch (IOException ioex)
      {
        // ByteArrayOutputStream should never actually throw.
        throw new Unexpected(ioex);
      }
  }

  /**
   * Converts to hexadecimal string, supplementing leading zeros.
   * Always returns 16 upper-case hex digits.
   */
  public static String toHex(long l)
  {
    StringBuffer b = new StringBuffer();
    b.append(Long.toHexString(l).toUpperCase());
    while (b.length() < 16)
      b.insert(0, '0');
    return b.toString();
  }

  /**
   * Returns a <code>String</code> representing the type-encoding of a class
   * (the JVM field descriptor, e.g. "I", "[Ljava/lang/String;").
   */
  static String getDescriptor(Class type)
  {
    if (type.equals(boolean.class))
      return "Z";
    if (type.equals(byte.class))
      return "B";
    if (type.equals(short.class))
      return "S";
    if (type.equals(char.class))
      return "C";
    if (type.equals(int.class))
      return "I";
    if (type.equals(long.class))
      return "J";
    if (type.equals(float.class))
      return "F";
    if (type.equals(double.class))
      return "D";
    if (type.equals(void.class))
      return "V";
    else if (type.isArray())
      {
        StringBuffer l = new StringBuffer("[");
        Class component = type.getComponentType();

        while (component.isArray())
          {
            l.append('[');
            component = component.getComponentType();
          }

        l.append('L');
        l.append(component.getName().replace('.', '/'));
        l.append(';');
        return l.toString();
      }
    else
      return "L" + type.getName().replace('.', '/') + ';';
  }

  /**
   * Collects every marshallable (non-static, non-transient) field declared by
   * the class and all its superclasses below Object, sorted by field name,
   * with accessibility forced on. The returned array is in canonical order,
   * which both writeFields and readFields rely on.
   */
  public static Field[] getWritableFields(Class c)
  {
    TreeSet set = new TreeSet(new Comparator()
      {
        public int compare(Object a, Object b)
        {
          return ((Field) a).getName().compareTo(((Field) b).getName());
        }
      });

    while (!c.equals(Object.class))
      {
        Field[] f = c.getDeclaredFields();
        for (int i = 0; i < f.length; i++)
          {
            if ((f[i].getModifiers() & NON_WRITABLE) == 0)
              {
                f[i].setAccessible(true);
                set.add(f[i]);
              }
          }
        c = c.getSuperclass();
      }

    Field[] r = new Field[set.size()];
    int p = 0;
    Iterator it = set.iterator();
    while (it.hasNext())
      {
        r[p++] = (Field) it.next();
      }
    return r;
  }

  /**
   * The method is called for Remotes that are not Stubs. It is assumed, that
   * the Remote is an implementation. The method searches for the suitable tie
   * and, if found, exports it by creating and connecting the stub. Such export
   * is supported since jdk 1.5.
   */
  void exportTie(org.omg.CORBA_2_3.portable.OutputStream output,
                 Object implementation, Class interfaceClass)
  {
    try
      {
        // Remote, but non - stub class (implementation)
        // must be replaced by stub.
        Tie t = Util.getTie((Remote) implementation);
        if (t instanceof Servant)
          {
            POA rootPoa = POAHelper.narrow(
              output.orb().resolve_initial_references("RootPOA"));
            org.omg.CORBA.Object co = rootPoa.servant_to_reference((Servant) t);
            Stub stub = (Stub) PortableRemoteObject.narrow(co, interfaceClass);
            writeRemoteObject(output, stub);

            // Activate the POA manager if it is still in the initial
            // HOLDING state, so the exported reference can be invoked.
            if (rootPoa.the_POAManager().get_state().value() == State._HOLDING)
              rootPoa.the_POAManager().activate();
          }
        else if (t instanceof org.omg.CORBA.Object)
          {
            org.omg.CORBA.Object co = (org.omg.CORBA.Object) t;
            output.orb().connect(co);

            Stub stub = (Stub) PortableRemoteObject.narrow(co, interfaceClass);
            writeRemoteObject(output, stub);
          }
      }
    catch (Exception ex)
      {
        MARSHAL m = new MARSHAL("Unable to export " + implementation);
        m.minor = Minor.TargetConversion;
        m.initCause(ex);
        throw m;
      }
  }

  /**
   * Start the ORB, if it is not already runnning.
   */
  void ensureOrbRunning(org.omg.CORBA_2_3.portable.OutputStream output)
  {
    // Ensure ORB is running.
    if (output.orb() instanceof OrbFunctional)
      {
        ((OrbFunctional) output.orb()).ensureRunning();
      }
  }

  /**
   * Write data to the CORBA output stream. Writes the object contents only; the
   * header must be already written. For object, containing objects, may be
   * called recursively.
   *
   * @param an_output a stream to write to, must be
   *          org.omg.CORBA_2_3.portable.OutputStream
   * @param object an object to write.
   */
  public void writeRemoteObject(OutputStream an_output, Object object)
  {
    org.omg.CORBA_2_3.portable.OutputStream output =
      (org.omg.CORBA_2_3.portable.OutputStream) an_output;

    if (isTieRequired(object))
      {
        // Find the interface that is implemented by the object and extends
        // Remote.
        Class fc = getExportedInterface(object);
        exportTie(output, object, fc);
      }
    else if (object instanceof org.omg.CORBA.Object)
      {
        ensureOrbRunning(output);
        an_output.write_Object((org.omg.CORBA.Object) object);
      }
    else if (object != null && object instanceof Serializable)
      writeFields(an_output, (Serializable) object);
  }

  /**
   * Write data to the CORBA output stream. Writes the object contents only; the
   * header must be already written. For object, containing objects, may be
   * called recursively.
   *
   * Chooses between three formats: Externalizable (writeExternal),
   * custom Serializable (own writeObject, detected and cached in io_format
   * together with whether defaultWriteObject was called), and the standard
   * field-by-field format (writeFields).
   *
   * @param an_output a stream to write to, must be
   *          org.omg.CORBA_2_3.portable.OutputStream
   * @param object an object to write.
   */
  public void writeValue(OutputStream an_output, Serializable object)
  {
    org.omg.CORBA_2_3.portable.OutputStream output =
      (org.omg.CORBA_2_3.portable.OutputStream) an_output;

    if (isTieRequired(object))
      {
        // Find the interface that is implemented by the object and extends
        // Remote.
        Class fc = getExportedInterface(object);
        exportTie(output, object, fc);
      }
    else if (object instanceof org.omg.CORBA.Object)
      {
        ensureOrbRunning(output);
        an_output.write_Object((org.omg.CORBA.Object) object);
      }
    else if (object instanceof Externalizable)
      {
        try
          {
            ObjectOutputStream stream = new CorbaOutput(output, object, this);

            stream.write(VERSION);

            ((Externalizable) object).writeExternal(stream);
          }
        catch (Exception ex)
          {
            MARSHAL m = new MARSHAL("writeExternal failed");
            m.minor = Minor.Value;
            m.initCause(ex);
            throw m;
          }
      }
    else if (object instanceof Serializable)
      {
        Object mode = null;
        synchronized (io_format)
          {
            mode = io_format.get(object.getClass());
            if (mode == STANDARD)
              {
                // Known to have no writeObject — take the fast path.
                writeFields(an_output, (Serializable) object);
                return;
              }
          }
        try
          {
            Method m = object.getClass().getDeclaredMethod("writeObject",
                                                           WRITE_OBJECT_ARGS);
            m.setAccessible(true); // May be private.

            try
              {
                ObjectOutputStream stream = new CorbaOutput(output, object,
                                                            this);
                // Write version.
                stream.write(VERSION);

                if (mode == CUSTOM_DWO)
                  // Write true, supposing that the defaultWriteObject
                  // has been called.
                  stream.write(1);
                else if (mode == CUSTOM_NO_DWO)
                  // Write false (has not been called)
                  stream.write(0);
                else
                  {
                    // Measure: run writeObject once against a tester stream to
                    // find out whether it calls defaultWriteObject, and cache
                    // the answer for the class.
                    DefaultWriteObjectTester tester =
                      new DefaultWriteObjectTester(object);
                    m.invoke(object, new Object[] { tester });

                    synchronized (io_format)
                      {
                        io_format.put(object.getClass(),
                                      tester.dwo_called ? CUSTOM_DWO
                                                        : CUSTOM_NO_DWO);
                        stream.write(tester.dwo_called ? 1 : 0);
                      }
                  }

                m.invoke(object, new Object[] { stream });
                stream.flush();
              }
            catch (Exception ex)
              {
                MARSHAL mx = new MARSHAL(object.getClass().getName()
                                         + ".writeObject failed");
                mx.initCause(ex);
                throw mx;
              }
          }
        catch (NoSuchMethodException e)
          {
            // Write in a standard way.
            writeFields(an_output, (Serializable) object);
            synchronized (io_format)
              {
                io_format.put(object.getClass(), STANDARD);
              }
          }
      }
  }

  /**
   * Read value from the input stream in the case when the value is not
   * Streamable or CustomMarshalled.
   *
   * Reads the object contents only; the header (repository id, codebase) must
   * be already consumed. Remote interfaces are narrowed into their Stub class;
   * Externalizable and custom-readObject formats mirror the writeValue side,
   * including the leading version byte and the defaultWriteObject flag.
   *
   * @param in the stream to read from, must be
   *          org.omg.CORBA_2_3.portable.InputStream
   * @param offset position of the value in the stream, used to register the
   *          instance for indirections.
   * @param clz the expected class of the value.
   * @param repositoryID the repository Id from the stream.
   * @param sender the sending context; expected to be a gnuRuntime when
   *          talking to this ORB implementation, may be foreign otherwise.
   *
   * @return the object, extracted from the stream.
   */
  public Serializable readValue(InputStream in, int offset, Class clz,
                                String repositoryID, RunTime sender)
  {
    if (in instanceof HeadlessInput)
      ((HeadlessInput) in).subsequentCalls = true;

    gnuRuntime g;
    Serializable object = null;

    try
      {
        g = (gnuRuntime) sender;
        object = g.target;
      }
    catch (ClassCastException e)
      {
        // Working with the other CORBA implementation.
        g = null;
      }

    org.omg.CORBA_2_3.portable.InputStream input =
      (org.omg.CORBA_2_3.portable.InputStream) in;

    if (Remote.class.isAssignableFrom(clz)
        || ValueBase.class.isAssignableFrom(clz))
      {
        // Interface is narrowed into Stub.
        if (clz.isInterface())
          try
            {
              clz = Util.loadClass(
                PortableRemoteObjectDelegateImpl.getStubClassName(clz.getName()),
                null, clz.getClassLoader());
            }
          catch (ClassNotFoundException e)
            {
              MARSHAL m = new MARSHAL("Cannot get stub from interface "
                                      + clz.getClass().getName());
              m.minor = Minor.TargetConversion;
              m.initCause(e);
              throw m;
            }

        // Remote needs special handling.
        if (ObjectImpl.class.isAssignableFrom(clz))
          {
            // First read CORBA object reference.
            Object ro = input.read_Object();

            ObjectImpl obj = (ObjectImpl) ro;
            if (obj == null)
              return null;

            Delegate delegate = obj._get_delegate();
            object = instantiate(offset, clz, g);
            ((ObjectImpl) object)._set_delegate(delegate);
          }
        // The object - specific data follows.
      }
    else if (org.omg.CORBA.Object.class.isAssignableFrom(clz))
      object = (Serializable) input.read_Object();

    if (object == null)
      object = instantiate(offset, clz, g);

    // The sentence below prevents attempt to read the internal fields of the
    // ObjectImpl (or RMI Stub) that might follow the object definition.
    // Sun's jre 1.5 does not write this information. The stubs, generated
    // by rmic, does not contain such fields.
    if (object instanceof ObjectImpl)
      return object;

    if (object instanceof Externalizable)
      {
        try
          {
            CorbaInput stream = new CorbaInput(input, object, this, offset,
                                               repositoryID, g);

            byte version = stream.readByte();
            if (version != 1)
              throw new MARSHAL("Unsuported RMI-IIOP version " + version);

            ((Externalizable) object).readExternal(stream);
          }
        catch (Exception ex)
          {
            MARSHAL m = new MARSHAL("readExternal failed");
            m.initCause(ex);
            throw m;
          }
      }
    else
      {
        Object mode = null;
        synchronized (io_format)
          {
            mode = io_format.get(object.getClass());
          }

        if (mode == STANDARD)
          {
            readFields(offset, repositoryID, object, input, g);
          }
        else
          {
            try
              {
                Method m = object.getClass().getDeclaredMethod("readObject",
                                                               READ_OBJECT_ARGS);
                try
                  {
                    m.setAccessible(true); // May be private.

                    CorbaInput stream = new CorbaInput(input, object, this,
                                                       offset, repositoryID, g);

                    byte version = stream.readByte();
                    if (version != 1)
                      throw new MARSHAL("Unsuported RMI-IIOP version "
                                        + version);

                    // This would indicate is defaultWriteObject has been
                    // called,
                    // but the readObject method normally takes care about this.
                    boolean dwo = stream.readByte() != 0;
                    m.invoke(object, new Object[] { stream });
                    synchronized (io_format)
                      {
                        io_format.put(object.getClass(),
                                      dwo ? CUSTOM_DWO : CUSTOM_NO_DWO);
                      }
                  }
                catch (Exception ex)
                  {
                    // NOTE(review): printStackTrace on a library path — the
                    // exception is also wrapped below, so this dumps to stderr
                    // redundantly.
                    ex.printStackTrace();

                    MARSHAL mx = new MARSHAL(object.getClass().getName()
                                             + ".readObject failed");
                    mx.initCause(ex);
                    throw mx;
                  }
              }
            catch (NoSuchMethodException e)
              {
                // Read in a standard way.
                synchronized (io_format)
                  {
                    io_format.put(object.getClass(), STANDARD);
                    readFields(offset, repositoryID, object, input, g);
                  }
              }
          }
      }
    return object;
  }

  /**
   * Create an instance.
   *
   * Uses Vio.instantiateAnyWay (bypassing constructors) and registers the new
   * instance at the given stream offset for later indirections.
   * NOTE(review): if g is null (foreign sending context in readValue), the
   * g.objectWritten call throws NPE which is then wrapped into MARSHAL —
   * confirm whether that is the intended behavior.
   */
  Serializable instantiate(int offset, Class clz, gnuRuntime g)
    throws MARSHAL
  {
    Serializable object;
    try
      {
        object = (Serializable) Vio.instantiateAnyWay(clz);
        g.objectWritten(object, offset);
      }
    catch (Exception e)
      {
        MARSHAL m = new MARSHAL("Unable to instantiate " + clz);
        m.minor = Minor.Instantiation;
        m.initCause(e);
        throw m;
      }
    return object;
  }

  /**
   * Read fields of the object.
   *
   * Mirrors writeFields: iterates the canonically ordered writable fields and
   * reads each with the matching CDR primitive call, the wide string helper
   * for String, readValue for CORBA/Remote members and Vio.read otherwise.
   * Any failure is wrapped into MARSHAL with minor ValueFields.
   */
  void readFields(int offset, String repositoryID, Serializable object,
                  org.omg.CORBA_2_3.portable.InputStream input, gnuRuntime r)
    throws MARSHAL
  {
    Field f = null;
    Class o_class = object.getClass();

    try
      {
        // The returned field array must already be in canonical order.
        Field[] fields = getWritableFields(o_class);

        Class fc;

        for (int i = 0; i < fields.length; i++)
          {
            // Full value type header expected ahead.
            if (input instanceof HeadlessInput)
              ((HeadlessInput) input).subsequentCalls = true;

            f = fields[i];
            fc = f.getType();
            Object v;

            if (fc == String.class)
              {
                v = input.read_value(wStringValueHelper);
              }
            else if (fc == int.class)
              v = new Integer(input.read_long());
            else if (fc == long.class)
              v = new Long(input.read_longlong());
            else if (fc == double.class)
              v = new Double(input.read_double());
            else if (fc == float.class)
              v = new Float(input.read_float());
            else if (fc == boolean.class)
              v = input.read_boolean() ? Boolean.TRUE : Boolean.FALSE;
            else if (fc == short.class)
              v = new Short(input.read_short());
            else if (fc == byte.class)
              v = new Byte(input.read_octet());
            else if (fc == char.class)
              v = new Character(input.read_char());
            else if (org.omg.CORBA.Object.class.isAssignableFrom(fc)
                     || Remote.class.isAssignableFrom(fc))
              {
                v = readValue(input, offset, fc, null, r);
              }
            else
              {
                v = Vio.read(input, fc);
              }

            f.set(object, v);
          }
      }
    catch (Exception ex)
      {
        MARSHAL m = new MARSHAL("Cannot read " + o_class.getName() + " field "
                                + f);
        m.initCause(ex);
        m.minor = Minor.ValueFields;
        throw m;
      }
  }
}
package com.noodleofdeath.pastaparser.model.grammar.rule.impl;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;

import com.noodleofdeath.pastaparser.model.grammar.Grammar;
import com.noodleofdeath.pastaparser.model.grammar.rule.GrammarRuleComponentType;
import com.noodleofdeath.pastaparser.model.grammar.rule.GrammarRule;
import com.noodleofdeath.pastaparser.model.grammar.rule.GrammarRuleCommand;
import com.noodleofdeath.pastaparser.model.grammar.rule.GrammarRuleType;
import com.noodleofdeath.pastaparser.model.graph.impl.AbstractGrammarTree;

/**
 * Abstract implementation of {@link GrammarRule}.
 */
public class BaseGrammarRule extends AbstractGrammarTree<GrammarRule> implements GrammarRule {

  /** Children that are queued for adding. */
  protected List<GrammarRule> queue = new ArrayList<>();

  /** Parent grammar associated with this grammar rule. */
  protected Grammar grammar;

  /** Unique identifier for this grammar rule. */
  protected String id = null;

  /** Immediate atomic value of this grammar rule. */
  protected String value = null;

  /**
   * Maps each rule type to the subrules registered under it. Backed by a
   * {@link LinkedHashMap} so that iteration preserves insertion order.
   */
  protected HashMap<GrammarRuleType, ArrayList<GrammarRule>> ruleMap = new LinkedHashMap<>();

  /**
   * <code>true</code> if this rule is an exclusion rule; <code>false</code>,
   * otherwise.
   */
  protected boolean inverted = false;

  /** Order of this grammar rule. */
  protected int order = Integer.MAX_VALUE;

  /** Categories associated with this grammar rule. */
  protected List<String> categories = new ArrayList<>();

  /** Option flags of this grammar rule. */
  protected List<String> options = new ArrayList<>();

  /** Type of this grammar rule. Default is {@link GrammarRuleType#Unknown}. */
  protected GrammarRuleType ruleType = GrammarRuleType.Unknown;

  /**
   * Type of this grammar rule. Default is
   * {@link GrammarRuleComponentType#Unknown}.
   */
  protected GrammarRuleComponentType componentType = GrammarRuleComponentType.Unknown;

  /** Command of this grammar rule, if one exists. */
  protected GrammarRuleCommand command = null;

  /** Root context of this grammar rule, if one exists. */
  protected GrammarRule rootContext = null;

  /** Previous sibling of this grammar rule, if one exists. */
  protected GrammarRule prev = null;

  /** Next sibling of this grammar rule, if one exists. */
  protected GrammarRule next = null;

  /** Constructs a new grammar rule with no initialized properties. */
  public BaseGrammarRule() {

  }

  /**
   * Constructs a new grammar rule with an initial identifier.
   *
   * @param id to set for this grammar rule.
   */
  public BaseGrammarRule(String id) {
    this(id, null);
  }

  /**
   * Constructs a new grammar rule with an initial identifier and value.
   *
   * @param id to set for this grammar rule.
   * @param value to set for this grammar rule.
   */
  public BaseGrammarRule(String id, String value) {
    this(id, value, GrammarRuleComponentType.Unknown);
  }

  /**
   * Constructs a new grammar rule with an initial identifier, value, and rule
   * component type.
   *
   * @param id to set for this grammar rule.
   * @param value to set for this grammar rule.
   * @param componentType to set for this grammar rule.
   */
  public BaseGrammarRule(String id, String value, GrammarRuleComponentType componentType) {
    this(id, value, componentType, null);
  }

  /**
   * Constructs a new grammar rule with an initial identifier, value, rule
   * component type, and grammar.
   *
   * @param id to set for this grammar rule.
   * @param value to set for this grammar rule.
   * @param componentType to set for this grammar rule.
   * @param grammar to set for this grammar rule.
   */
  public BaseGrammarRule(String id, String value, GrammarRuleComponentType componentType,
      Grammar grammar) {
    this.id = id;
    this.value = value;
    this.componentType = componentType;
    this.grammar = grammar;
  }

  /**
   * Renders this rule in a grammar-like notation: a (possibly quoted) value
   * with its quantifier, parenthesized subrule alternatives, the next sibling,
   * an optional {@code -> command} suffix, and a {@code ~} prefix when the
   * rule is inverted.
   */
  @Override
  public String toString() {
    String stringValue = "";
    switch (componentType) {
    case Literal:
      // Literals are rendered quoted.
      stringValue += String.format("'%s'%s", value, quantifier());
      break;
    case Composite:
      // Composites have no immediate value of their own; only their
      // subrules (appended below) are rendered.
      break;
    case LexerRule:
    case LexerFragment:
    default:
      stringValue += String.format("%s%s", value, quantifier());
      break;
    }
    if (subrules().size() > 0) {
      List<String> strings = new ArrayList<>();
      for (GrammarRule subrule : subrules())
        strings.add(subrule.toString());
      // Subrules are alternatives, joined with '|'.
      stringValue += String.format(" (%s)%s", String.join(" | ", strings), quantifier());
    }
    if (next() != null)
      stringValue += String.format(" %s", next());
    if (command != null)
      stringValue += String.format(" -> %s", command);
    if (inverted())
      stringValue = String.format("~%s", stringValue);
    return stringValue.trim();
  }

  /**
   * Returns the subrules registered under {@code ruleType}, or
   * <code>null</code> if none have been registered for that type.
   */
  @Override
  public List<GrammarRule> subrules(GrammarRuleType ruleType) {
    // NOTE(review): returns null (not an empty list) for unregistered types;
    // callers must null-check. Kept as-is for backward compatibility.
    return ruleMap.get(ruleType);
  }

  @Override
  public HashMap<GrammarRuleType, ArrayList<GrammarRule>> ruleMap() {
    return ruleMap;
  }

  /** Enqueues a child rule for later adding; returns true on success. */
  @Override
  public boolean enqueue(GrammarRule rule) {
    return queue.add(rule);
  }

  @Override
  public List<GrammarRule> queue() {
    return queue;
  }

  @Override
  public void clearQueue() {
    queue.clear();
  }

  @Override
  public Grammar grammar() {
    return grammar;
  }

  @Override
  public void setGrammar(Grammar grammar) {
    this.grammar = grammar;
  }

  @Override
  public String id() {
    return id;
  }

  @Override
  public void setId(String id) {
    this.id = id;
  }

  @Override
  public String value() {
    return value;
  }

  @Override
  public void setValue(String value) {
    this.value = value;
  }

  @Override
  public boolean inverted() {
    return inverted;
  }

  @Override
  public void setInverted(boolean inverted) {
    this.inverted = inverted;
  }

  @Override
  public int order() {
    return order;
  }

  @Override
  public void setOrder(int order) {
    this.order = order;
  }

  @Override
  public GrammarRuleType ruleType() {
    return ruleType;
  }

  @Override
  public void setRuleType(GrammarRuleType ruleType) {
    this.ruleType = ruleType;
  }

  @Override
  public GrammarRuleComponentType componentType() {
    return componentType;
  }

  @Override
  public void setRuleComponentType(GrammarRuleComponentType componentType) {
    this.componentType = componentType;
  }

  @Override
  public List<String> categories() {
    return categories;
  }

  @Override
  public void setCategories(List<String> categories) {
    this.categories = categories;
  }

  @Override
  public List<String> options() {
    return options;
  }

  @Override
  public void setOptions(List<String> options) {
    this.options = options;
  }

  @Override
  public GrammarRuleCommand command() {
    return command;
  }

  @Override
  public void setCommand(GrammarRuleCommand command) {
    this.command = command;
  }

  @Override
  public GrammarRule prev() {
    return prev;
  }

  @Override
  public void setPrev(GrammarRule prev) {
    this.prev = prev;
  }

  @Override
  public GrammarRule next() {
    return next;
  }

  /**
   * Sets the next sibling of this rule and back-links it (the new sibling's
   * {@code prev} is set to this rule). Passing <code>null</code> clears the
   * link.
   *
   * @param next the next sibling, or <code>null</code> for none.
   */
  @Override
  public void setNext(GrammarRule next) {
    // Fix: the original unconditionally called next.setPrev(this), throwing
    // NullPointerException when clearing the sibling with null — even though
    // the field is documented as "if one exists".
    if (next != null) {
      next.setPrev(this);
    }
    this.next = next;
  }

}
/* Copyright (C) 2013-2014 Computer Sciences Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. */

/**
 * Autogenerated by Thrift Compiler (0.9.1)
 *
 * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
 *  @generated
 */
// NOTE(review): generated code — only comments have been added below.
// Behavioral changes belong in the .thrift IDL followed by regeneration.
package ezbake.services.centralPurge.thrift;

import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;

import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Result wrapper for an age-off-event query: the matching event states plus a
 * count (both fields are REQUIRED in the IDL).
 */
public class CentralAgeOffEventQueryResults implements org.apache.thrift.TBase<CentralAgeOffEventQueryResults, CentralAgeOffEventQueryResults._Fields>, java.io.Serializable, Cloneable, Comparable<CentralAgeOffEventQueryResults> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("CentralAgeOffEventQueryResults");

  private static final org.apache.thrift.protocol.TField AGE_OFF_EVENT_STATES_FIELD_DESC = new org.apache.thrift.protocol.TField("ageOffEventStates", org.apache.thrift.protocol.TType.LIST, (short)1);
  private static final org.apache.thrift.protocol.TField COUNT_FIELD_DESC = new org.apache.thrift.protocol.TField("count", org.apache.thrift.protocol.TType.I64, (short)2);

  // Per-protocol serialization strategies (standard field-tagged wire format
  // vs. the denser tuple format).
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new CentralAgeOffEventQueryResultsStandardSchemeFactory());
    schemes.put(TupleScheme.class, new CentralAgeOffEventQueryResultsTupleSchemeFactory());
  }

  public List<CentralAgeOffEventState> ageOffEventStates; // required
  public long count; // required

  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    AGE_OFF_EVENT_STATES((short)1, "ageOffEventStates"),
    COUNT((short)2, "count");

    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();

    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // AGE_OFF_EVENT_STATES
          return AGE_OFF_EVENT_STATES;
        case 2: // COUNT
          return COUNT;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final String _fieldName;

    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public String getFieldName() {
      return _fieldName;
    }
  }

  // isset id assignments
  // 'count' is a primitive, so its "has been set" state is tracked via a bit
  // in __isset_bitfield rather than by nullability.
  private static final int __COUNT_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.AGE_OFF_EVENT_STATES, new org.apache.thrift.meta_data.FieldMetaData("ageOffEventStates", org.apache.thrift.TFieldRequirementType.REQUIRED, 
        new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, 
            new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, CentralAgeOffEventState.class))));
    tmpMap.put(_Fields.COUNT, new org.apache.thrift.meta_data.FieldMetaData("count", org.apache.thrift.TFieldRequirementType.REQUIRED, 
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(CentralAgeOffEventQueryResults.class, metaDataMap);
  }

  public CentralAgeOffEventQueryResults() {
  }

  public CentralAgeOffEventQueryResults(
    List<CentralAgeOffEventState> ageOffEventStates,
    long count)
  {
    this();
    this.ageOffEventStates = ageOffEventStates;
    this.count = count;
    setCountIsSet(true);
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public CentralAgeOffEventQueryResults(CentralAgeOffEventQueryResults other) {
    __isset_bitfield = other.__isset_bitfield;
    if (other.isSetAgeOffEventStates()) {
      List<CentralAgeOffEventState> __this__ageOffEventStates = new ArrayList<CentralAgeOffEventState>(other.ageOffEventStates.size());
      for (CentralAgeOffEventState other_element : other.ageOffEventStates) {
        __this__ageOffEventStates.add(new CentralAgeOffEventState(other_element));
      }
      this.ageOffEventStates = __this__ageOffEventStates;
    }
    this.count = other.count;
  }

  public CentralAgeOffEventQueryResults deepCopy() {
    return new CentralAgeOffEventQueryResults(this);
  }

  @Override
  public void clear() {
    this.ageOffEventStates = null;
    setCountIsSet(false);
    this.count = 0;
  }

  public int getAgeOffEventStatesSize() {
    return (this.ageOffEventStates == null) ? 0 : this.ageOffEventStates.size();
  }

  public java.util.Iterator<CentralAgeOffEventState> getAgeOffEventStatesIterator() {
    return (this.ageOffEventStates == null) ? null : this.ageOffEventStates.iterator();
  }

  public void addToAgeOffEventStates(CentralAgeOffEventState elem) {
    if (this.ageOffEventStates == null) {
      this.ageOffEventStates = new ArrayList<CentralAgeOffEventState>();
    }
    this.ageOffEventStates.add(elem);
  }

  public List<CentralAgeOffEventState> getAgeOffEventStates() {
    return this.ageOffEventStates;
  }

  public CentralAgeOffEventQueryResults setAgeOffEventStates(List<CentralAgeOffEventState> ageOffEventStates) {
    this.ageOffEventStates = ageOffEventStates;
    return this;
  }

  public void unsetAgeOffEventStates() {
    this.ageOffEventStates = null;
  }

  /** Returns true if field ageOffEventStates is set (has been assigned a value) and false otherwise */
  public boolean isSetAgeOffEventStates() {
    return this.ageOffEventStates != null;
  }

  public void setAgeOffEventStatesIsSet(boolean value) {
    if (!value) {
      this.ageOffEventStates = null;
    }
  }

  public long getCount() {
    return this.count;
  }

  public CentralAgeOffEventQueryResults setCount(long count) {
    this.count = count;
    setCountIsSet(true);
    return this;
  }

  public void unsetCount() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __COUNT_ISSET_ID);
  }

  /** Returns true if field count is set (has been assigned a value) and false otherwise */
  public boolean isSetCount() {
    return EncodingUtils.testBit(__isset_bitfield, __COUNT_ISSET_ID);
  }

  public void setCountIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __COUNT_ISSET_ID, value);
  }

  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case AGE_OFF_EVENT_STATES:
      if (value == null) {
        unsetAgeOffEventStates();
      } else {
        setAgeOffEventStates((List<CentralAgeOffEventState>)value);
      }
      break;

    case COUNT:
      if (value == null) {
        unsetCount();
      } else {
        setCount((Long)value);
      }
      break;

    }
  }

  public Object getFieldValue(_Fields field) {
    switch (field) {
    case AGE_OFF_EVENT_STATES:
      return getAgeOffEventStates();

    case COUNT:
      return Long.valueOf(getCount());

    }
    throw new IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }

    switch (field) {
    case AGE_OFF_EVENT_STATES:
      return isSetAgeOffEventStates();
    case COUNT:
      return isSetCount();
    }
    throw new IllegalStateException();
  }

  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof CentralAgeOffEventQueryResults)
      return this.equals((CentralAgeOffEventQueryResults)that);
    return false;
  }

  public boolean equals(CentralAgeOffEventQueryResults that) {
    if (that == null)
      return false;

    boolean this_present_ageOffEventStates = true && this.isSetAgeOffEventStates();
    boolean that_present_ageOffEventStates = true && that.isSetAgeOffEventStates();
    if (this_present_ageOffEventStates || that_present_ageOffEventStates) {
      if (!(this_present_ageOffEventStates && that_present_ageOffEventStates))
        return false;
      if (!this.ageOffEventStates.equals(that.ageOffEventStates))
        return false;
    }

    boolean this_present_count = true;
    boolean that_present_count = true;
    if (this_present_count || that_present_count) {
      if (!(this_present_count && that_present_count))
        return false;
      if (this.count != that.count)
        return false;
    }

    return true;
  }

  @Override
  public int hashCode() {
    HashCodeBuilder builder = new HashCodeBuilder();

    boolean present_ageOffEventStates = true && (isSetAgeOffEventStates());
    builder.append(present_ageOffEventStates);
    if (present_ageOffEventStates)
      builder.append(ageOffEventStates);

    boolean present_count = true;
    builder.append(present_count);
    if (present_count)
      builder.append(count);

    return builder.toHashCode();
  }

  @Override
  public int compareTo(CentralAgeOffEventQueryResults other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }

    int lastComparison = 0;

    lastComparison = Boolean.valueOf(isSetAgeOffEventStates()).compareTo(other.isSetAgeOffEventStates());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetAgeOffEventStates()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ageOffEventStates, other.ageOffEventStates);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetCount()).compareTo(other.isSetCount());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetCount()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.count, other.count);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }

  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }

  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }

  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("CentralAgeOffEventQueryResults(");
    boolean first = true;

    sb.append("ageOffEventStates:");
    if (this.ageOffEventStates == null) {
      sb.append("null");
    } else {
      sb.append(this.ageOffEventStates);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("count:");
    sb.append(this.count);
    first = false;
    sb.append(")");
    return sb.toString();
  }

  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (ageOffEventStates == null) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'ageOffEventStates' was not present! Struct: " + toString());
    }
    // alas, we cannot check 'count' because it's a primitive and you chose the non-beans generator.
    // check for sub-struct validity
  }

  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      // Java serialization delegates to the compact Thrift wire format.
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }

  private static class CentralAgeOffEventQueryResultsStandardSchemeFactory implements SchemeFactory {
    public CentralAgeOffEventQueryResultsStandardScheme getScheme() {
      return new CentralAgeOffEventQueryResultsStandardScheme();
    }
  }

  private static class CentralAgeOffEventQueryResultsStandardScheme extends StandardScheme<CentralAgeOffEventQueryResults> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, CentralAgeOffEventQueryResults struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { 
          break;
        }
        switch (schemeField.id) {
          case 1: // AGE_OFF_EVENT_STATES
            if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
              {
                org.apache.thrift.protocol.TList _list54 = iprot.readListBegin();
                struct.ageOffEventStates = new ArrayList<CentralAgeOffEventState>(_list54.size);
                for (int _i55 = 0; _i55 < _list54.size; ++_i55)
                {
                  CentralAgeOffEventState _elem56;
                  _elem56 = new CentralAgeOffEventState();
                  _elem56.read(iprot);
                  struct.ageOffEventStates.add(_elem56);
                }
                iprot.readListEnd();
              }
              struct.setAgeOffEventStatesIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // COUNT
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.count = iprot.readI64();
              struct.setCountIsSet(true);
            } else { 
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();

      // check for required fields of primitive type, which can't be checked in the validate method
      if (!struct.isSetCount()) {
        throw new org.apache.thrift.protocol.TProtocolException("Required field 'count' was not found in serialized data! Struct: " + toString());
      }
      struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, CentralAgeOffEventQueryResults struct) throws org.apache.thrift.TException {
      struct.validate();

      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.ageOffEventStates != null) {
        oprot.writeFieldBegin(AGE_OFF_EVENT_STATES_FIELD_DESC);
        {
          oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.ageOffEventStates.size()));
          for (CentralAgeOffEventState _iter57 : struct.ageOffEventStates)
          {
            _iter57.write(oprot);
          }
          oprot.writeListEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldBegin(COUNT_FIELD_DESC);
      oprot.writeI64(struct.count);
      oprot.writeFieldEnd();
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }

  }

  private static class CentralAgeOffEventQueryResultsTupleSchemeFactory implements SchemeFactory {
    public CentralAgeOffEventQueryResultsTupleScheme getScheme() {
      return new CentralAgeOffEventQueryResultsTupleScheme();
    }
  }

  private static class CentralAgeOffEventQueryResultsTupleScheme extends TupleScheme<CentralAgeOffEventQueryResults> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, CentralAgeOffEventQueryResults struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      // Both fields are REQUIRED, so the tuple encoding writes them
      // unconditionally and needs no optional-field bitset.
      {
        oprot.writeI32(struct.ageOffEventStates.size());
        for (CentralAgeOffEventState _iter58 : struct.ageOffEventStates)
        {
          _iter58.write(oprot);
        }
      }
      oprot.writeI64(struct.count);
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, CentralAgeOffEventQueryResults struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      {
        org.apache.thrift.protocol.TList _list59 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
        struct.ageOffEventStates = new ArrayList<CentralAgeOffEventState>(_list59.size);
        for (int _i60 = 0; _i60 < _list59.size; ++_i60)
        {
          CentralAgeOffEventState _elem61;
          _elem61 = new CentralAgeOffEventState();
          _elem61.read(iprot);
          struct.ageOffEventStates.add(_elem61);
        }
      }
      struct.setAgeOffEventStatesIsSet(true);
      struct.count = iprot.readI64();
      struct.setCountIsSet(true);
    }
  }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.cassandra.streaming.async;

import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import com.google.common.net.InetAddresses;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import io.netty.channel.ChannelPromise;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.net.TestChannel;
import org.apache.cassandra.net.TestScheduledFuture;
import org.apache.cassandra.streaming.PreviewKind;
import org.apache.cassandra.streaming.StreamOperation;
import org.apache.cassandra.streaming.StreamResultFuture;
import org.apache.cassandra.streaming.StreamSession;
import org.apache.cassandra.streaming.messages.CompleteMessage;

/**
 * Unit tests for NettyStreamingMessageSender's KeepAliveTask and
 * FileStreamTask, driven against an embedded TestChannel so no real network
 * I/O happens. Note: channel.releaseOutbound() both asserts that something
 * was written AND drains the outbound buffer, so assertion order within each
 * test is significant.
 */
public class NettyStreamingMessageSenderTest
{
    private static final InetAddressAndPort REMOTE_ADDR = InetAddressAndPort.getByAddressOverrideDefaults(InetAddresses.forString("127.0.0.2"), 0);

    private TestChannel channel;
    private StreamSession session;
    private NettyStreamingMessageSender sender;
    private NettyStreamingMessageSender.FileStreamTask fileStreamTask;

    @BeforeClass
    public static void before()
    {
        DatabaseDescriptor.daemonInitialization();
    }

    // Builds a fresh embedded channel and a follower StreamSession wired to
    // it before every test; the sender under test is the session's.
    @Before
    public void setUp()
    {
        channel = new TestChannel(Integer.MAX_VALUE);
        channel.attr(NettyStreamingMessageSender.TRANSFERRING_FILE_ATTR).set(Boolean.FALSE);
        UUID pendingRepair = UUID.randomUUID();
        session = new StreamSession(StreamOperation.BOOTSTRAP, REMOTE_ADDR, (template, messagingVersion) -> null, true, 0, pendingRepair, PreviewKind.ALL);
        StreamResultFuture future = StreamResultFuture.createFollower(0, UUID.randomUUID(), StreamOperation.REPAIR, REMOTE_ADDR, channel, pendingRepair, session.getPreviewKind());
        session.init(future);
        session.attachOutbound(channel);

        sender = session.getMessageSender();
        sender.setControlMessageChannel(channel);
    }

    // Detach the channel from any leftover FileStreamTask so state does not
    // leak across tests.
    @After
    public void tearDown()
    {
        if (fileStreamTask != null)
            fileStreamTask.unsetChannel();
    }

    // Open channel + connected sender: the keep-alive must actually be
    // written to the channel.
    @Test
    public void KeepAliveTask_normalSend()
    {
        Assert.assertTrue(channel.isOpen());
        NettyStreamingMessageSender.KeepAliveTask task = sender.new KeepAliveTask(channel, session);
        task.run();
        Assert.assertTrue(channel.releaseOutbound());
    }

    // Closed channel: the task must cancel its own scheduled future and
    // write nothing.
    @Test
    public void KeepAliveTask_channelClosed()
    {
        channel.close();
        Assert.assertFalse(channel.isOpen());
        channel.releaseOutbound();
        NettyStreamingMessageSender.KeepAliveTask task = sender.new KeepAliveTask(channel, session);
        task.future = new TestScheduledFuture();
        Assert.assertFalse(task.future.isCancelled());
        task.run();
        Assert.assertTrue(task.future.isCancelled());
        Assert.assertFalse(channel.releaseOutbound());
    }

    // Sender explicitly closed (channel still open): same outcome — cancel
    // and write nothing.
    @Test
    public void KeepAliveTask_closed()
    {
        Assert.assertTrue(channel.isOpen());
        NettyStreamingMessageSender.KeepAliveTask task = sender.new KeepAliveTask(channel, session);
        task.future = new TestScheduledFuture();

        Assert.assertFalse(task.future.isCancelled());
        sender.setClosed();
        Assert.assertFalse(sender.connected());
        task.run();
        Assert.assertTrue(task.future.isCancelled());
        Assert.assertFalse(channel.releaseOutbound());
    }

    // While a file transfer is in flight (TRANSFERRING_FILE_ATTR set), the
    // keep-alive is skipped but the task stays scheduled (not cancelled).
    @Test
    public void KeepAliveTask_CurrentlyStreaming()
    {
        Assert.assertTrue(channel.isOpen());
        channel.attr(NettyStreamingMessageSender.TRANSFERRING_FILE_ATTR).set(Boolean.TRUE);
        NettyStreamingMessageSender.KeepAliveTask task = sender.new KeepAliveTask(channel, session);
        task.future = new TestScheduledFuture();

        Assert.assertFalse(task.future.isCancelled());
        Assert.assertTrue(sender.connected());
        task.run();
        Assert.assertFalse(task.future.isCancelled());
        Assert.assertFalse(channel.releaseOutbound());
    }

    // A closed sender must refuse to hand out transfer permits.
    @Test
    public void FileStreamTask_acquirePermit_closed()
    {
        fileStreamTask = sender.new FileStreamTask(null);
        sender.setClosed();
        Assert.assertFalse(fileStreamTask.acquirePermit(1));
    }

    // Acquiring a permit on a live sender decrements the semaphore by one.
    @Test
    public void FileStreamTask_acquirePermit_HapppyPath()
    {
        int permits = sender.semaphoreAvailablePermits();
        fileStreamTask = sender.new FileStreamTask(null);
        Assert.assertTrue(fileStreamTask.acquirePermit(1));
        Assert.assertEquals(permits - 1, sender.semaphoreAvailablePermits());
    }

    // A null message with the transferring attr set fails the session; the
    // permit must still be returned afterwards.
    @Test
    public void FileStreamTask_BadChannelAttr()
    {
        int permits = sender.semaphoreAvailablePermits();
        channel.attr(NettyStreamingMessageSender.TRANSFERRING_FILE_ATTR).set(Boolean.TRUE);
        fileStreamTask = sender.new FileStreamTask(null);
        fileStreamTask.injectChannel(channel);
        fileStreamTask.run();
        Assert.assertEquals(StreamSession.State.FAILED, session.state());
        Assert.assertTrue(channel.releaseOutbound()); // when the session fails, it will send a SessionFailed msg
        Assert.assertEquals(permits, sender.semaphoreAvailablePermits());
    }

    // Streaming a CompleteMessage succeeds, writes to the channel, and
    // restores the permit count.
    @Test
    public void FileStreamTask_HappyPath()
    {
        int permits = sender.semaphoreAvailablePermits();
        fileStreamTask = sender.new FileStreamTask(new CompleteMessage());
        fileStreamTask.injectChannel(channel);
        fileStreamTask.run();
        Assert.assertNotEquals(StreamSession.State.FAILED, session.state());
        Assert.assertTrue(channel.releaseOutbound());
        Assert.assertEquals(permits, sender.semaphoreAvailablePermits());
    }

    // Successful control-message promise: nothing to clean up (null future),
    // channel and session stay healthy.
    @Test
    public void onControlMessageComplete_HappyPath()
    {
        Assert.assertTrue(channel.isOpen());
        Assert.assertTrue(sender.connected());
        ChannelPromise promise = channel.newPromise();
        promise.setSuccess();
        Assert.assertNull(sender.onControlMessageComplete(promise, new CompleteMessage()));
        Assert.assertTrue(channel.isOpen());
        Assert.assertTrue(sender.connected());
        Assert.assertNotEquals(StreamSession.State.FAILED, session.state());
    }

    // Failed control-message promise: the returned future completes the
    // error path — channel closed, sender disconnected, session FAILED.
    @Test
    public void onControlMessageComplete_Exception() throws InterruptedException, ExecutionException, TimeoutException
    {
        Assert.assertTrue(channel.isOpen());
        Assert.assertTrue(sender.connected());
        ChannelPromise promise = channel.newPromise();
        promise.setFailure(new RuntimeException("this is just a testing exception"));
        Future f = sender.onControlMessageComplete(promise, new CompleteMessage());
        f.get(5, TimeUnit.SECONDS);
        Assert.assertFalse(channel.isOpen());
        Assert.assertFalse(sender.connected());
        Assert.assertEquals(StreamSession.State.FAILED, session.state());
    }
}
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/

package org.drools.compiler.integrationtests;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import org.junit.Before;
import org.junit.Test;
import org.kie.internal.KnowledgeBase;
import org.kie.internal.KnowledgeBaseFactory;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
import org.kie.api.io.ResourceType;
import org.kie.internal.runtime.StatefulKnowledgeSession;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.api.runtime.rule.QueryResults;

/**
 * Tests Drools queries that join two patterns on {@code id}, across three
 * shapes: different fact types, the same type twice, and a subtype joined to
 * its supertype — each also exercised after an update or a retract/re-insert
 * of the second fact.
 */
public class QueryTest3 {

    private KnowledgeBase knowledgeBase;

    /**
     * Builds an in-memory DRL with the three queries under test and compiles
     * it into the shared knowledge base.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
        // The DRL source is assembled as a string; the query text itself is
        // runtime data and must not be altered.
        String text = "";
        text += "package org.drools.integrationtests\n";
        text += "import " + QueryTest3.Bar.class.getCanonicalName() + "\n";
        text += "import " + QueryTest3.Foo.class.getCanonicalName() + "\n";
        text += "import " + QueryTest3.Foo2.class.getCanonicalName() + "\n";
        text += "query \"testDifferent\"\n";
        text += "    foo : Foo();\n";
        text += "    bar : Bar(id == foo.id)\n";
        text += "end\n";
        text += "query \"testSame\"\n";
        text += "    foo : Foo();\n";
        text += "    foo2 : Foo(id == foo.id);\n";
        text += "end\n";
        text += "query \"testExtends\"\n";
        text += "    foo : Foo();\n";
        text += "    foo2 : Foo2(id == foo.id);\n";
        text += "end\n";

        KnowledgeBuilder knowledgeBuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        knowledgeBuilder.add( ResourceFactory.newByteArrayResource(text.getBytes()),
                              ResourceType.DRL );
        assertFalse( knowledgeBuilder.hasErrors() );

        knowledgeBase = KnowledgeBaseFactory.newKnowledgeBase();
        knowledgeBase.addKnowledgePackages( knowledgeBuilder.getKnowledgePackages() );
    }

    /**
     * Inserts both facts, optionally updates or retract-and-reinserts the
     * second one, runs the named query, and asserts the result count.
     * Expected counts reflect the cross-product of pattern matches — e.g.
     * "testSame" over two Foos with equal ids presumably yields 2x2 = 4 rows;
     * verify against the query semantics if counts change.
     *
     * @param o1 first fact to insert.
     * @param o2 second fact to insert (the one updated/retracted).
     * @param query name of the query to run.
     * @param expected expected number of query result rows.
     * @param doUpdate if true, re-propagate o2 via update().
     * @param doRetract if true, retract o2 then insert it again.
     */
    private void doIt(Object o1,
                      Object o2,
                      String query,
                      int expected,
                      boolean doUpdate,
                      boolean doRetract) {
        StatefulKnowledgeSession knowledgeSession = knowledgeBase.newStatefulKnowledgeSession();
        try {
            knowledgeSession.insert( o1 );
            FactHandle handle2 = knowledgeSession.insert( o2 );
            if ( doUpdate ) {
                knowledgeSession.update( handle2,
                                         o2 );
            } else if ( doRetract ) {
                knowledgeSession.retract( handle2 );
                handle2 = knowledgeSession.insert( o2 );
            }
            QueryResults queryResults = knowledgeSession.getQueryResults( query );
            assertEquals( expected,
                          queryResults.size() );
        } finally {
            // Always dispose the session so test runs don't leak memory.
            knowledgeSession.dispose();
        }
    }

    @Test
    public void testDifferent() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Bar bar = new Bar();
        bar.setId( "x" );
        doIt( foo,
              bar,
              "testDifferent",
              1,
              false,
              false );
    }

    @Test
    public void testDifferentWithUpdate() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Bar bar = new Bar();
        bar.setId( "x" );
        doIt( foo,
              bar,
              "testDifferent",
              1,
              true,
              false );
    }

    @Test
    public void testSame() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Foo foo2 = new Foo();
        foo2.setId( "x" );
        doIt( foo,
              foo2,
              "testSame",
              4,
              false,
              false );
    }

    @Test
    public void testSameWithUpdate() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Foo foo2 = new Foo();
        foo2.setId( "x" );
        doIt( foo,
              foo2,
              "testSame",
              4,
              true,
              false );
    }

    @Test
    public void testExtends() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Foo2 foo2 = new Foo2();
        foo2.setId( "x" );
        doIt( foo,
              foo2,
              "testExtends",
              2,
              false,
              false );
    }

    @Test
    public void testExtendsWithUpdate() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Foo2 foo2 = new Foo2();
        foo2.setId( "x" );
        doIt( foo,
              foo2,
              "testExtends",
              2,
              true,
              false );
    }

    @Test
    public void testExtendsWithRetract() {
        Foo foo = new Foo();
        foo.setId( "x" );
        Foo2 foo2 = new Foo2();
        foo2.setId( "x" );
        doIt( foo,
              foo2,
              "testExtends",
              2,
              false,
              true );
    }

    /** Simple fact type unrelated to Foo; joined by id in "testDifferent". */
    public static class Bar {
        private String id;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }
    }

    /** Base fact type used by all three queries. */
    public static class Foo {
        private String id;

        public String getId() {
            return id;
        }

        public void setId(String id) {
            this.id = id;
        }
    }

    /** Subtype of Foo; matched by the supertype pattern in "testExtends". */
    public static class Foo2 extends Foo {
    }
}
/*-------------------------------------------------------------------------
 *
 * Copyright (c) 2004-2011, PostgreSQL Global Development Group
 *
 *
 *-------------------------------------------------------------------------
 */
package org.postgresql.test.jdbc2;

import org.postgresql.test.TestUtil;
import org.postgresql.test.util.BrokenInputStream;
import junit.framework.TestCase;
import java.io.*;
import java.sql.*;
import java.math.BigDecimal;

/**
 * JUnit 3 tests for the driver's PreparedStatement implementation:
 * stream parameters, null handling, quoting/comment parsing in SQL
 * text, setObject type coercions, and statement-describe behavior.
 * Each test runs against a live database opened via TestUtil.
 */
public class PreparedStatementTest extends TestCase
{
    // Connection opened in setUp() and closed in tearDown(); autocommit mode.
    private Connection conn;

    public PreparedStatementTest(String name)
    {
        super(name);
        try
        {
            // Instantiating the driver forces it to load and register itself.
            // Failure is deliberately ignored: if the driver really is missing,
            // setUp() will fail with a clearer error.
            org.postgresql.Driver driver = new org.postgresql.Driver();
        }
        catch (Exception ex) {;}
    }

    // Creates the three scratch tables used by the tests below.
    protected void setUp() throws Exception
    {
        conn = TestUtil.openDB();
        TestUtil.createTable(conn, "streamtable", "bin bytea, str text");
        TestUtil.createTable(conn, "texttable", "ch char(3), te text, vc varchar(3)");
        TestUtil.createTable(conn, "intervaltable", "i interval");
    }

    // Drops the scratch tables and closes the connection.
    protected void tearDown() throws SQLException
    {
        TestUtil.dropTable(conn, "streamtable");
        TestUtil.dropTable(conn, "texttable");
        TestUtil.dropTable(conn, "intervaltable");
        TestUtil.closeDB(conn);
    }

    // setBinaryStream with a null stream, an empty stream, and a
    // non-empty stream with both zero and full declared lengths.
    public void testSetBinaryStream() throws SQLException
    {
        ByteArrayInputStream bais;
        byte buf[] = new byte[10];
        for (int i = 0; i < buf.length; i++)
        {
            buf[i] = (byte)i;
        }

        bais = null;
        doSetBinaryStream(bais, 0);

        bais = new ByteArrayInputStream(new byte[0]);
        doSetBinaryStream(bais, 0);

        bais = new ByteArrayInputStream(buf);
        doSetBinaryStream(bais, 0);

        bais = new ByteArrayInputStream(buf);
        doSetBinaryStream(bais, 10);
    }

    // setAsciiStream with declared lengths of zero, exactly the data
    // size ("Hello\n" = 6 bytes), and longer than the data.
    public void testSetAsciiStream() throws Exception
    {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        PrintWriter pw = new PrintWriter(new OutputStreamWriter(baos, "ASCII"));
        pw.println("Hello");
        pw.flush();

        ByteArrayInputStream bais;

        bais = new ByteArrayInputStream(baos.toByteArray());
        doSetAsciiStream(bais, 0);

        bais = new ByteArrayInputStream(baos.toByteArray());
        doSetAsciiStream(bais, 6);

        bais = new ByteArrayInputStream(baos.toByteArray());
        doSetAsciiStream(bais, 100);
    }

    // Per JDBC, executing new SQL text through a PreparedStatement's
    // Statement-style execute methods must throw.
    public void testExecuteStringOnPreparedStatement() throws Exception
    {
        PreparedStatement pstmt = conn.prepareStatement("SELECT 1");

        try
        {
            pstmt.executeQuery("SELECT 2");
            fail("Expected an exception when executing a new SQL query on a prepared statement");
        }
        catch (SQLException e)
        {
        }

        try
        {
            pstmt.executeUpdate("UPDATE streamtable SET bin=bin");
            fail("Expected an exception when executing a new SQL update on a prepared statement");
        }
        catch (SQLException e)
        {
        }

        try
        {
            pstmt.execute("UPDATE streamtable SET bin=bin");
            fail("Expected an exception when executing a new SQL statement on a prepared statement");
        }
        catch (SQLException e)
        {
        }
    }

    // Broken/mis-sized parameter streams must fail the statement but
    // leave the connection usable and the table unmodified.
    public void testBinaryStreamErrorsRestartable() throws SQLException
    {
        // The V2 protocol does not have the ability to recover when
        // streaming data to the server. We could potentially try
        // introducing a syntax error to force the query to fail, but
        // that seems dangerous.
        //
        if (!TestUtil.isProtocolVersion(conn, 3))
        {
            return ;
        }

        byte buf[] = new byte[10];
        for (int i = 0; i < buf.length; i++)
        {
            buf[i] = (byte)i;
        }

        // InputStream is shorter than the length argument implies.
        InputStream is = new ByteArrayInputStream(buf);
        runBrokenStream(is, buf.length + 1);

        // InputStream throws an Exception during read.
        is = new BrokenInputStream(new ByteArrayInputStream(buf), buf.length / 2);
        runBrokenStream(is, buf.length);

        // Invalid length < 0.
        is = new ByteArrayInputStream(buf);
        runBrokenStream(is, -1);

        // Total Bind message length too long.
        is = new ByteArrayInputStream(buf);
        runBrokenStream(is, Integer.MAX_VALUE);
    }

    // Executes an INSERT with the given (broken) stream/length, expects
    // an SQLException, then verifies the connection still works and no
    // row was inserted.
    private void runBrokenStream(InputStream is, int length) throws SQLException
    {
        PreparedStatement pstmt = null;
        try
        {
            pstmt = conn.prepareStatement("INSERT INTO streamtable (bin,str) VALUES (?,?)");
            pstmt.setBinaryStream(1, is, length);
            pstmt.setString(2, "Other");
            pstmt.executeUpdate();
            fail("This isn't supposed to work.");
        }
        catch (SQLException sqle)
        {
            // don't need to rollback because we're in autocommit mode
            pstmt.close();

            // verify the connection is still valid and the row didn't go in.
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM streamtable");
            assertTrue(rs.next());
            assertEquals(0, rs.getInt(1));
            rs.close();
            stmt.close();
        }
    }

    // Helper: insert one row with the given binary stream and declared length.
    private void doSetBinaryStream(ByteArrayInputStream bais, int length) throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("INSERT INTO streamtable (bin,str) VALUES (?,?)");
        pstmt.setBinaryStream(1, bais, length);
        pstmt.setString(2, null);
        pstmt.executeUpdate();
        pstmt.close();
    }

    // Helper: insert one row with the given ASCII stream and declared length.
    private void doSetAsciiStream(InputStream is, int length) throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("INSERT INTO streamtable (bin,str) VALUES (?,?)");
        pstmt.setBytes(1, null);
        pstmt.setAsciiStream(2, is, length);
        pstmt.executeUpdate();
        pstmt.close();
    }

    // Trailing spaces in parameter values must round-trip unchanged and
    // still match in WHERE comparisons for char/text/varchar columns.
    public void testTrailingSpaces() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("INSERT INTO texttable (ch, te, vc) VALUES (?, ?, ?) ");
        String str = "a ";
        pstmt.setString(1, str);
        pstmt.setString(2, str);
        pstmt.setString(3, str);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement("SELECT ch, te, vc FROM texttable WHERE ch=? AND te=? AND vc=?");
        pstmt.setString(1, str);
        pstmt.setString(2, str);
        pstmt.setString(3, str);
        ResultSet rs = pstmt.executeQuery();
        assertTrue(rs.next());
        assertEquals(str, rs.getString(1));
        assertEquals(str, rs.getString(2));
        assertEquals(str, rs.getString(3));
        rs.close();
        pstmt.close();
    }

    // setNull/setObject(null) with varying amounts of type information,
    // from fully-typed down to no type info at all.
    public void testSetNull() throws SQLException
    {
        // valid: fully qualified type to setNull()
        PreparedStatement pstmt = conn.prepareStatement("INSERT INTO texttable (te) VALUES (?)");
        pstmt.setNull(1, Types.VARCHAR);
        pstmt.executeUpdate();

        // valid: fully qualified type to setObject()
        pstmt.setObject(1, null, Types.VARCHAR);
        pstmt.executeUpdate();

        // valid: setObject() with partial type info and a typed "null object instance"
        org.postgresql.util.PGobject dummy = new org.postgresql.util.PGobject();
        dummy.setType("text");
        dummy.setValue(null);
        pstmt.setObject(1, dummy, Types.OTHER);
        pstmt.executeUpdate();

        // setObject() with no type info
        pstmt.setObject(1, null);
        pstmt.executeUpdate();

        // setObject() with insufficient type info
        pstmt.setObject(1, null, Types.OTHER);
        pstmt.executeUpdate();

        // setNull() with insufficient type info
        pstmt.setNull(1, Types.OTHER);
        pstmt.executeUpdate();

        pstmt.close();
    }

    // Single-quoted literal parsing around backslashes, doubled quotes
    // and '?' placeholders, with and without standard_conforming_strings.
    public void testSingleQuotes() throws SQLException
    {
        // Inputs for servers where backslash escapes are active in '...'.
        String[] testStrings = new String[]
            {
                "bare ? question mark",
                "quoted \\' single quote",
                "doubled '' single quote",
                "octal \\060 constant",
                "escaped \\? question mark",
                "double \\\\ backslash",
                "double \" quote",
            };

        // Inputs for standard-conforming mode, where backslash is literal.
        String[] testStringsStdConf = new String[]
            {
                "bare ? question mark",
                "quoted '' single quote",
                "doubled '' single quote",
                "octal 0 constant",
                "escaped ? question mark",
                "double \\ backslash",
                "double \" quote",
            };

        // Expected decoded values in both modes.
        String[] expected = new String[]
            {
                "bare ? question mark",
                "quoted ' single quote",
                "doubled ' single quote",
                "octal 0 constant",
                "escaped ? question mark",
                "double \\ backslash",
                "double \" quote",
            };

        if (!TestUtil.haveMinimumServerVersion(conn, "8.2"))
        {
            // Pre-8.2 servers have no standard_conforming_strings setting;
            // backslash escapes are always active.
            for (int i = 0; i < testStrings.length; ++i)
            {
                PreparedStatement pstmt = conn.prepareStatement("SELECT '" + testStrings[i] + "'");
                ResultSet rs = pstmt.executeQuery();
                assertTrue(rs.next());
                assertEquals(expected[i], rs.getString(1));
                rs.close();
                pstmt.close();
            }
        }
        else
        {
            boolean oldStdStrings = TestUtil.getStandardConformingStrings(conn);
            Statement stmt = conn.createStatement();

            // Test with standard_conforming_strings turned off.
            stmt.execute("SET standard_conforming_strings TO off");
            for (int i = 0; i < testStrings.length; ++i)
            {
                PreparedStatement pstmt = conn.prepareStatement("SELECT '" + testStrings[i] + "'");
                ResultSet rs = pstmt.executeQuery();
                assertTrue(rs.next());
                assertEquals(expected[i], rs.getString(1));
                rs.close();
                pstmt.close();
            }

            // Test with standard_conforming_strings turned on...
            // ... using the escape string syntax (E'').
            stmt.execute("SET standard_conforming_strings TO on");
            for (int i = 0; i < testStrings.length; ++i)
            {
                PreparedStatement pstmt = conn.prepareStatement("SELECT E'" + testStrings[i] + "'");
                ResultSet rs = pstmt.executeQuery();
                assertTrue(rs.next());
                assertEquals(expected[i], rs.getString(1));
                rs.close();
                pstmt.close();
            }

            // ... using standard conforming input strings.
            for (int i = 0; i < testStrings.length; ++i)
            {
                PreparedStatement pstmt = conn.prepareStatement("SELECT '" + testStringsStdConf[i] + "'");
                ResultSet rs = pstmt.executeQuery();
                assertTrue(rs.next());
                assertEquals(expected[i], rs.getString(1));
                rs.close();
                pstmt.close();
            }

            // Restore the setting the connection started with.
            stmt.execute("SET standard_conforming_strings TO " + (oldStdStrings ? "on" : "off"));
            stmt.close();
        }
    }

    // Double-quoted identifiers may contain '?', quotes and backslashes;
    // none of these must be treated as placeholders or escapes.
    public void testDoubleQuotes() throws SQLException
    {
        String[] testStrings = new String[]
            {
                "bare ? question mark",
                "single ' quote",
                "doubled '' single quote",
                "doubled \"\" double quote",
                "no backslash interpretation here: \\",
            };

        for (int i = 0; i < testStrings.length; ++i)
        {
            PreparedStatement pstmt = conn.prepareStatement("CREATE TABLE \"" + testStrings[i] + "\" (i integer)");
            pstmt.executeUpdate();
            pstmt.close();

            pstmt = conn.prepareStatement("DROP TABLE \"" + testStrings[i] + "\"");
            pstmt.executeUpdate();
            pstmt.close();
        }
    }

    // '?' inside dollar-quoted strings must not be treated as a
    // placeholder, across nested/odd dollar-quote tags and multiple
    // statements in one string.
    public void testDollarQuotes() throws SQLException
    {
        // dollar-quotes are supported in the backend since version 8.0
        if (!TestUtil.haveMinimumServerVersion(conn, "8.0"))
            return;

        PreparedStatement st;
        ResultSet rs;

        st = conn.prepareStatement("SELECT $$;$$ WHERE $x$?$x$=$_0$?$_0$ AND $$?$$=?");
        st.setString(1, "?");
        rs = st.executeQuery();
        assertTrue(rs.next());
        assertEquals(";", rs.getString(1));
        assertFalse(rs.next());
        st.close();

        st = conn.prepareStatement(
                "SELECT $__$;$__$ WHERE ''''=$q_1$'$q_1$ AND ';'=?;" +
                "SELECT $x$$a$;$x $a$$x$ WHERE $$;$$=? OR ''=$c$c$;$c$;" +
                "SELECT ?");
        st.setString(1, ";");
        st.setString(2, ";");
        st.setString(3, "$a$ $a$");

        assertTrue(st.execute());
        rs = st.getResultSet();
        assertTrue(rs.next());
        assertEquals(";", rs.getString(1));
        assertFalse(rs.next());

        assertTrue(st.getMoreResults());
        rs = st.getResultSet();
        assertTrue(rs.next());
        assertEquals("$a$;$x $a$", rs.getString(1));
        assertFalse(rs.next());

        assertTrue(st.getMoreResults());
        rs = st.getResultSet();
        assertTrue(rs.next());
        assertEquals("$a$ $a$", rs.getString(1));
        assertFalse(rs.next());
        st.close();
    }

    // '$' in identifiers (table/column names) must not start a
    // dollar-quote in the parser.
    public void testDollarQuotesAndIdentifiers() throws SQLException
    {
        // dollar-quotes are supported in the backend since version 8.0
        if (!TestUtil.haveMinimumServerVersion(conn, "8.0"))
            return;

        PreparedStatement st;

        conn.createStatement().execute("CREATE TEMP TABLE a$b$c(a varchar, b varchar)");
        st = conn.prepareStatement("INSERT INTO a$b$c (a, b) VALUES (?, ?)");
        st.setString(1, "a");
        st.setString(2, "b");
        st.executeUpdate();
        st.close();

        conn.createStatement().execute("CREATE TEMP TABLE e$f$g(h varchar, e$f$g varchar) ");
        st = conn.prepareStatement("UPDATE e$f$g SET h = ? || e$f$g");
        st.setString(1, "a");
        st.executeUpdate();
        st.close();
    }

    // '?' inside nested block comments and line comments must not be
    // treated as a placeholder.
    public void testComments() throws SQLException
    {
        PreparedStatement st;
        ResultSet rs;

        st = conn.prepareStatement("SELECT /*?*/ /*/*/*/**/*/*/*/1;SELECT ?;--SELECT ?");
        st.setString(1, "a");
        assertTrue(st.execute());
        assertTrue(st.getMoreResults());
        assertFalse(st.getMoreResults());
        st.close();

        st = conn.prepareStatement("SELECT /**/'?'/*/**/*/ WHERE '?'=/*/*/*?*/*/*/--?\n?");
        st.setString(1, "?");
        rs = st.executeQuery();
        assertTrue(rs.next());
        assertEquals("?", rs.getString(1));
        assertFalse(rs.next());
        st.close();
    }

    // Round-trips extreme double values and a SQL NULL through a float column.
    public void testDouble() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE TEMP TABLE double_tab (max_double float, min_double float, null_value float)");
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "insert into double_tab values (?,?,?)");
        pstmt.setDouble(1, 1.0E125);
        pstmt.setDouble(2, 1.0E-130);
        pstmt.setNull(3,Types.DOUBLE);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from double_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        // NOTE(review): 'd' is never used; the asserts re-fetch the column.
        double d = rs.getDouble(1);
        assertTrue( rs.getDouble(1) == 1.0E125 );
        assertTrue( rs.getDouble(2) == 1.0E-130 );
        rs.getDouble(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // Round-trips extreme float values and a SQL NULL through a real column.
    public void testFloat() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE TEMP TABLE float_tab (max_float real, min_float real, null_value real)");
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "insert into float_tab values (?,?,?)");
        pstmt.setFloat(1,(float)1.0E37 );
        pstmt.setFloat(2, (float)1.0E-37);
        pstmt.setNull(3,Types.FLOAT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from float_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        // NOTE(review): 'f' is never used; the asserts re-fetch the column.
        float f = rs.getFloat(1);
        assertTrue( "expected 1.0E37,received " + rs.getFloat(1), rs.getFloat(1) == (float)1.0E37 );
        assertTrue( "expected 1.0E-37,received " + rs.getFloat(2), rs.getFloat(2) == (float)1.0E-37 );
        rs.getDouble(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // Round-trips true/false and a SQL NULL through a boolean column.
    public void testBoolean() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE TEMP TABLE bool_tab (max_val boolean, min_val boolean, null_val boolean)");
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "insert into bool_tab values (?,?,?)");
        pstmt.setBoolean(1,true );
        pstmt.setBoolean(2, false);
        pstmt.setNull(3,Types.BIT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from bool_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected true,received " + rs.getBoolean(1), rs.getBoolean(1) == true );
        assertTrue( "expected false,received " + rs.getBoolean(2), rs.getBoolean(2) == false );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Integer, Types.FLOAT): Integer values coerced to float8
    // should read back as the equivalent Double.
    public void testSetFloatInteger() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE float_tab (max_val float8, min_val float, null_val float8)");
        pstmt.executeUpdate();
        pstmt.close();

        Integer maxInteger= new Integer(2147483647), minInteger = new Integer(-2147483648);

        Double maxFloat=new Double( 2147483647), minFloat = new Double( -2147483648 );

        pstmt = conn.prepareStatement( "insert into float_tab values (?,?,?)");
        pstmt.setObject(1,maxInteger,Types.FLOAT );
        pstmt.setObject(2,minInteger,Types.FLOAT);
        pstmt.setNull(3,Types.FLOAT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from float_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected "+maxFloat+" ,received " + rs.getObject(1), ((Double)rs.getObject(1)).equals(maxFloat) );
        assertTrue( "expected "+minFloat+" ,received " + rs.getObject(2), ((Double)rs.getObject(2)).equals( minFloat) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(String, Types.FLOAT): numeric strings coerced to float8
    // should read back as the equivalent Double.
    public void testSetFloatString() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE float_tab (max_val float8, min_val float8, null_val float8)");
        pstmt.executeUpdate();
        pstmt.close();

        String maxStringFloat = new String("1.0E37"), minStringFloat = new String("1.0E-37");
        Double maxFloat=new Double(1.0E37), minFloat = new Double( 1.0E-37 );

        pstmt = conn.prepareStatement( "insert into float_tab values (?,?,?)");
        pstmt.setObject(1,maxStringFloat,Types.FLOAT );
        pstmt.setObject(2,minStringFloat,Types.FLOAT );
        pstmt.setNull(3,Types.FLOAT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from float_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        // NOTE(review): the "expected true/false" messages below look
        // copy-pasted from testBoolean; the assertions themselves compare
        // the Double values correctly.
        assertTrue( "expected true,received " + rs.getObject(1), ((Double)rs.getObject(1)).equals(maxFloat) );
        assertTrue( "expected false,received " + rs.getBoolean(2), ((Double)rs.getObject(2)).equals( minFloat) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(BigDecimal, Types.FLOAT): BigDecimal coerced to float8
    // should read back as the equivalent Double.
    public void testSetFloatBigDecimal() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE float_tab (max_val float8, min_val float8, null_val float8)");
        pstmt.executeUpdate();
        pstmt.close();

        BigDecimal maxBigDecimalFloat = new BigDecimal("1.0E37"), minBigDecimalFloat = new BigDecimal("1.0E-37");
        Double maxFloat=new Double(1.0E37), minFloat = new Double( 1.0E-37 );

        pstmt = conn.prepareStatement( "insert into float_tab values (?,?,?)");
        pstmt.setObject(1,maxBigDecimalFloat,Types.FLOAT );
        pstmt.setObject(2,minBigDecimalFloat,Types.FLOAT );
        pstmt.setNull(3,Types.FLOAT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from float_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + maxFloat + " ,received " + rs.getObject(1), ((Double)rs.getObject(1)).equals(maxFloat) );
        assertTrue( "expected " + minFloat + " ,received " + rs.getObject(2), ((Double)rs.getObject(2)).equals( minFloat) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Float, Types.TINYINT): Float coerced to an int column
    // should read back as the equivalent Integer.
    public void testSetTinyIntFloat() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE tiny_int (max_val int4, min_val int4, null_val int4)");
        pstmt.executeUpdate();
        pstmt.close();

        Integer maxInt = new Integer( 127 ), minInt = new Integer(-127);
        Float maxIntFloat = new Float( 127 ), minIntFloat = new Float( -127 );

        pstmt = conn.prepareStatement( "insert into tiny_int values (?,?,?)");
        pstmt.setObject(1,maxIntFloat,Types.TINYINT );
        pstmt.setObject(2,minIntFloat,Types.TINYINT );
        pstmt.setNull(3,Types.TINYINT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from tiny_int");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + maxInt+" ,received " + rs.getObject(1), ((Integer)rs.getObject(1)).equals( maxInt ) );
        assertTrue( "expected " + minInt+" ,received " + rs.getObject(2), ((Integer)rs.getObject(2)).equals( minInt ) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Float, Types.SMALLINT): Float coerced to an int column
    // should read back as the equivalent Integer.
    public void testSetSmallIntFloat() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE small_int (max_val int4, min_val int4, null_val int4)");
        pstmt.executeUpdate();
        pstmt.close();

        Integer maxInt = new Integer( 32767 ), minInt = new Integer(-32768);
        Float maxIntFloat = new Float( 32767 ), minIntFloat = new Float( -32768 );

        pstmt = conn.prepareStatement( "insert into small_int values (?,?,?)");
        pstmt.setObject(1,maxIntFloat,Types.SMALLINT );
        pstmt.setObject(2,minIntFloat,Types.SMALLINT );
        pstmt.setNull(3,Types.TINYINT);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from small_int");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + maxInt+" ,received " + rs.getObject(1), ((Integer)rs.getObject(1)).equals( maxInt ) );
        assertTrue( "expected " + minInt+" ,received " + rs.getObject(2), ((Integer)rs.getObject(2)).equals( minInt ) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Float, Types.INTEGER): Float coerced to an int column
    // should read back as the equivalent Integer.
    public void testSetIntFloat() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE int_TAB (max_val int4, min_val int4, null_val int4)");
        pstmt.executeUpdate();
        pstmt.close();

        Integer maxInt = new Integer( 1000 ), minInt = new Integer(-1000);
        Float maxIntFloat = new Float( 1000 ), minIntFloat = new Float( -1000 );

        pstmt = conn.prepareStatement( "insert into int_tab values (?,?,?)");
        pstmt.setObject(1,maxIntFloat,Types.INTEGER );
        pstmt.setObject(2,minIntFloat,Types.INTEGER );
        pstmt.setNull(3,Types.INTEGER);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from int_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + maxInt+" ,received " + rs.getObject(1), ((Integer)rs.getObject(1)).equals( maxInt ) );
        assertTrue( "expected " + minInt+" ,received " + rs.getObject(2), ((Integer)rs.getObject(2)).equals( minInt ) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Boolean, Types.DOUBLE): true/false coerced to a float
    // column should read back as 1.0/0.0.
    public void testSetBooleanDouble() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE double_tab (max_val float, min_val float, null_val float)");
        pstmt.executeUpdate();
        pstmt.close();

        Boolean trueVal = Boolean.TRUE, falseVal = Boolean.FALSE;
        Double dBooleanTrue = new Double(1), dBooleanFalse = new Double( 0 );

        pstmt = conn.prepareStatement( "insert into double_tab values (?,?,?)");
        pstmt.setObject(1,trueVal,Types.DOUBLE );
        pstmt.setObject(2,falseVal,Types.DOUBLE );
        pstmt.setNull(3,Types.DOUBLE);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from double_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + dBooleanTrue + " ,received " + rs.getObject(1), ((Double)rs.getObject(1)).equals( dBooleanTrue ) );
        assertTrue( "expected " + dBooleanFalse + " ,received " + rs.getObject(2), ((Double)rs.getObject(2)).equals( dBooleanFalse ) );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Boolean, Types.NUMERIC): true/false coerced to a numeric
    // column should read back as BigDecimal 1/0 (compared via compareTo
    // because scale may differ).
    public void testSetBooleanNumeric() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE numeric_tab (max_val numeric(30,15), min_val numeric(30,15), null_val numeric(30,15))");
        pstmt.executeUpdate();
        pstmt.close();

        Boolean trueVal = Boolean.TRUE, falseVal = Boolean.FALSE;
        BigDecimal dBooleanTrue = new BigDecimal(1), dBooleanFalse = new BigDecimal( 0 );

        pstmt = conn.prepareStatement( "insert into numeric_tab values (?,?,?)");
        pstmt.setObject(1,trueVal,Types.NUMERIC,2 );
        pstmt.setObject(2,falseVal,Types.NUMERIC,2 );
        pstmt.setNull(3,Types.DOUBLE);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from numeric_tab");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + dBooleanTrue + " ,received " + rs.getObject(1), ((BigDecimal)rs.getObject(1)).compareTo( dBooleanTrue )==0 );
        assertTrue( "expected " + dBooleanFalse + " ,received " + rs.getObject(2), ((BigDecimal)rs.getObject(2)).compareTo( dBooleanFalse )==0 );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // setObject(Boolean, Types.DECIMAL): same as testSetBooleanNumeric
    // but through the DECIMAL type code.
    public void testSetBooleanDecimal() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("CREATE temp TABLE DECIMAL_TAB (max_val numeric(30,15), min_val numeric(30,15), null_val numeric(30,15))");
        pstmt.executeUpdate();
        pstmt.close();

        Boolean trueVal = Boolean.TRUE, falseVal = Boolean.FALSE;
        BigDecimal dBooleanTrue = new BigDecimal(1), dBooleanFalse = new BigDecimal( 0 );

        pstmt = conn.prepareStatement( "insert into DECIMAL_TAB values (?,?,?)");
        pstmt.setObject(1,trueVal,Types.DECIMAL,2 );
        pstmt.setObject(2,falseVal,Types.DECIMAL,2 );
        pstmt.setNull(3,Types.DOUBLE);
        pstmt.executeUpdate();
        pstmt.close();

        pstmt = conn.prepareStatement( "select * from DECIMAL_TAB");
        ResultSet rs = pstmt.executeQuery();
        assertTrue( rs.next());
        assertTrue( "expected " + dBooleanTrue + " ,received " + rs.getObject(1), ((BigDecimal)rs.getObject(1)).compareTo( dBooleanTrue )==0 );
        assertTrue( "expected " + dBooleanFalse + " ,received " + rs.getObject(2), ((BigDecimal)rs.getObject(2)).compareTo( dBooleanFalse )==0 );
        rs.getFloat(3);
        assertTrue( rs.wasNull() );
        rs.close();
        pstmt.close();
    }

    // Inserting into an interval column: a plain string parameter fails
    // with a type mismatch (V3 protocol), but setObject with Types.OTHER
    // lets the server infer the type and succeed.
    public void testUnknownSetObject() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("INSERT INTO intervaltable(i) VALUES (?)");

        if (TestUtil.isProtocolVersion(conn, 3))
        {
            pstmt.setString(1, "1 week");
            try
            {
                pstmt.executeUpdate();
                fail("Should have failed with type mismatch.");
            }
            catch (SQLException sqle)
            {
            }
        }

        pstmt.setObject(1, "1 week", Types.OTHER);
        pstmt.executeUpdate();
        pstmt.close();
    }

    /**
     * With autoboxing this apparently happens more often now.
     */
    public void testSetObjectCharacter() throws SQLException
    {
        PreparedStatement ps = conn.prepareStatement("INSERT INTO texttable(te) VALUES (?)");
        ps.setObject(1, new Character('z'));
        ps.executeUpdate();
        ps.close();
    }

    /**
     * When we have parameters of unknown type and it's not using
     * the unnamed statement, we issue a protocol level statement
     * describe message for the V3 protocol. This test just makes
     * sure that works.
     */
    public void testStatementDescribe() throws SQLException
    {
        PreparedStatement pstmt = conn.prepareStatement("SELECT ?::int");
        pstmt.setObject(1, new Integer(2), Types.OTHER);
        // Re-execute to exercise the named-statement describe path repeatedly.
        for (int i=0; i<10; i++)
        {
            ResultSet rs = pstmt.executeQuery();
            assertTrue(rs.next());
            assertEquals(2, rs.getInt(1));
            rs.close();
        }
        pstmt.close();
    }
}
/* * Copyright 2011 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import com.google.common.collect.Lists; import com.google.javascript.jscomp.ReferenceCollectingCallback.Behavior; import com.google.javascript.jscomp.ReferenceCollectingCallback.Reference; import com.google.javascript.jscomp.ReferenceCollectingCallback.ReferenceCollection; import com.google.javascript.jscomp.ReferenceCollectingCallback.ReferenceMap; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * Using the infrastructure provided by {@link ReferenceCollectingCallback}, * identify variables that are only ever assigned to object literals * and that are never used in entirety, and expand the objects into * individual variables. 
 *
 * Based on the InlineVariables pass
 */
class InlineObjectLiterals implements CompilerPass {

  /** Prefix for the synthetic variables created for each inlined property. */
  public static final String VAR_PREFIX = "JSCompiler_object_inline_";

  private final AbstractCompiler compiler;

  /** Supplies unique ids so generated variable names never collide. */
  private final Supplier<String> safeNameIdSupplier;

  InlineObjectLiterals(
      AbstractCompiler compiler,
      Supplier<String> safeNameIdSupplier) {
    this.compiler = compiler;
    this.safeNameIdSupplier = safeNameIdSupplier;
  }

  @Override
  public void process(Node externs, Node root) {
    ReferenceCollectingCallback callback = new ReferenceCollectingCallback(
        compiler, new InliningBehavior());
    callback.process(externs, root);
  }

  /**
   * Builds up information about nodes in each scope. When exiting the
   * scope, inspects all variables in that scope, and inlines any
   * that we can.
   */
  private class InliningBehavior implements Behavior {

    /**
     * A list of variables that should not be inlined, because their
     * reference information is out of sync with the state of the AST.
     */
    private final Set<Var> staleVars = new HashSet<>();

    @Override
    public void afterExitScope(NodeTraversal t, ReferenceMap referenceMap) {
      for (Iterator<Var> it = t.getScope().getVars(); it.hasNext();) {
        Var v = it.next();

        if (isVarInlineForbidden(v)) {
          continue;
        }

        ReferenceCollection referenceInfo = referenceMap.getReferences(v);

        if (isInlinableObject(referenceInfo.references)) {
          // Blacklist the object itself, as well as any other values
          // that it refers to, since they will have been moved around.
          staleVars.add(v);

          Reference init = referenceInfo.getInitializingReference();

          // Split up the object into individual variables if the object
          // is never referenced directly in full.
          splitObject(v, init, referenceInfo);
        }
      }
    }

    /**
     * If there are any variable references in the given node tree,
     * blacklist them to prevent the pass from trying to inline the
     * variable. Any code modifications will have potentially made the
     * ReferenceCollection invalid.
     */
    private void blacklistVarReferencesInTree(Node root, final Scope scope) {
      NodeUtil.visitPreOrder(root, new NodeUtil.Visitor() {
        @Override
        public void visit(Node node) {
          if (node.isName()) {
            staleVars.add(scope.getVar(node.getString()));
          }
        }
      }, NodeUtil.MATCH_NOT_FUNCTION);
    }

    /**
     * Whether the given variable is forbidden from being inlined.
     */
    private boolean isVarInlineForbidden(Var var) {
      // A variable may not be inlined if:
      // 1) The variable is defined in the externs
      // 2) The variable is exported,
      // 3) Don't inline the special RENAME_PROPERTY_FUNCTION_NAME
      // 4) A reference to the variable has been inlined. We're downstream
      //    of the mechanism that creates variable references, so we don't
      //    have a good way to update the reference. Just punt on it.
      // Additionally, exclude global variables for now.
      return var.isGlobal()
          || var.isExtern()
          || compiler.getCodingConvention().isExported(var.name)
          || RenameProperties.RENAME_PROPERTY_FUNCTION_NAME.equals(var.name)
          || staleVars.contains(var);
    }

    /**
     * Returns true iff the variable is only ever used in ways this pass can
     * rewrite: property reads/writes and whole-object assignments of object
     * literals. Any direct (full) reference to the object, e.g.:
     * <pre>
     *   x;
     *   x.fn();
     * </pre>
     * disqualifies it.
     */
    private boolean isInlinableObject(List<Reference> refs) {
      boolean ret = false;
      Set<String> validProperties = new HashSet<>();
      for (Reference ref : refs) {
        Node name = ref.getNode();
        Node parent = ref.getParent();
        Node gramps = ref.getGrandparent();

        // Ignore most indirect references, like x.y (but not x.y(),
        // since the function referenced by y might reference 'this').
        if (parent.isGetProp()) {
          Preconditions.checkState(parent.getFirstChild() == name);
          // A call target may be using the object as a 'this' value.
          if (gramps.isCall()
              && gramps.getFirstChild() == parent) {
            return false;
          }

          // Deleting a property has different semantics from deleting
          // a variable, so deleted properties should not be inlined.
          if (gramps.isDelProp()) {
            return false;
          }

          // NOTE(nicksantos): This pass's object-splitting algorithm has
          // a blind spot. It assumes that if a property isn't defined on an
          // object, then the value is undefined. This is not true, because
          // Object.prototype can have arbitrary properties on it.
          //
          // We short-circuit this problem by bailing out if we see a reference
          // to a property that isn't defined on the object literal. This
          // isn't a perfect algorithm, but it should catch most cases.
          String propName = parent.getLastChild().getString();
          if (!validProperties.contains(propName)) {
            if (NodeUtil.isVarOrSimpleAssignLhs(parent, gramps)) {
              validProperties.add(propName);
            } else {
              return false;
            }
          }
          continue;
        }

        // Only rewrite VAR declarations or simple assignment statements
        if (!isVarOrAssignExprLhs(name)) {
          return false;
        }

        Node val = ref.getAssignedValue();
        if (val == null) {
          // A var with no assignment.
          continue;
        }

        // We're looking for object literal assignments only.
        if (!val.isObjectLit()) {
          return false;
        }

        // Make sure that the value is not self-referential. IOW,
        // disallow things like x = {b: x.a}.
        //
        // TODO(dimvar): Only exclude unorderable self-referential
        // assignments. i.e. x = {a: x.b, b: x.a} is not orderable,
        // but x = {a: 1, b: x.a} is.
        //
        // Also, ES5 getters/setters aren't handled by this pass.
        for (Node child = val.getFirstChild(); child != null;
             child = child.getNext()) {
          if (child.isGetterDef() || child.isSetterDef()) {
            // ES5 get/set not supported.
            return false;
          }
          validProperties.add(child.getString());

          Node childVal = child.getFirstChild();
          // Check if childVal is the parent of any of the passed in
          // references, as that is how self-referential assignments
          // will happen.
          for (Reference t : refs) {
            Node refNode = t.getParent();
            while (!NodeUtil.isStatementBlock(refNode)) {
              if (refNode == childVal) {
                // There's a self-referential assignment
                return false;
              }
              refNode = refNode.getParent();
            }
          }
        }

        // We have found an acceptable object literal assignment. As
        // long as there are no other assignments that mess things up,
        // we can inline.
        ret = true;
      }
      return ret;
    }

    /** Whether n is the LHS of a var declaration or of a simple expression-statement assignment. */
    private boolean isVarOrAssignExprLhs(Node n) {
      Node parent = n.getParent();
      return parent.isVar()
          || (parent.isAssign()
              && parent.getFirstChild() == n
              && parent.getParent().isExprResult());
    }

    /**
     * Computes a list of ever-referenced keys in the object being
     * inlined, and returns a mapping of key name -> generated
     * variable name.
     */
    private Map<String, String> computeVarList(
        ReferenceCollection referenceInfo) {
      // LinkedHashMap keeps declaration order deterministic.
      Map<String, String> varmap = new LinkedHashMap<>();

      for (Reference ref : referenceInfo.references) {
        if (ref.isLvalue() || ref.isInitializingDeclaration()) {
          // Whole-object assignment: collect every key of the literal.
          Node val = ref.getAssignedValue();
          if (val != null) {
            Preconditions.checkState(val.isObjectLit());
            for (Node child = val.getFirstChild(); child != null;
                 child = child.getNext()) {
              String varname = child.getString();
              if (varmap.containsKey(varname)) {
                continue;
              }
              String var = VAR_PREFIX + varname + "_" + safeNameIdSupplier.get();
              varmap.put(varname, var);
            }
          }
        } else if (ref.getParent().isVar()) {
          // This is the var. There is no value.
        } else {
          // Property read/write: record the single key being accessed.
          Node getprop = ref.getParent();
          Preconditions.checkState(getprop.isGetProp());

          // The key being looked up in the original map.
          String varname = getprop.getLastChild().getString();
          if (varmap.containsKey(varname)) {
            continue;
          }
          String var = VAR_PREFIX + varname + "_" + safeNameIdSupplier.get();
          varmap.put(varname, var);
        }
      }

      return varmap;
    }

    /**
     * Populates a map of key names -> initial assigned values. The
     * object literal these are being pulled from is invalidated as
     * a result (each value node is detached from the literal).
     */
    private void fillInitialValues(Reference init, Map<String, Node> initvals) {
      Node object = init.getAssignedValue();
      Preconditions.checkState(object.isObjectLit());
      for (Node key = object.getFirstChild(); key != null;
           key = key.getNext()) {
        initvals.put(key.getString(), key.removeFirstChild());
      }
    }

    /**
     * Replaces an assignment like x = {...} with t1=a,t2=b,t3=c,true.
     * Note that the resulting expression will always evaluate to
     * true, as would the x = {...} expression.
     */
    private void replaceAssignmentExpression(Var v, Reference ref,
        Map<String, String> varmap) {
      // Compute all of the assignments necessary
      List<Node> nodes = new ArrayList<>();
      Node val = ref.getAssignedValue();
      blacklistVarReferencesInTree(val, v.scope);
      Preconditions.checkState(val.isObjectLit());
      Set<String> all = new LinkedHashSet<>(varmap.keySet());
      for (Node key = val.getFirstChild(); key != null;
           key = key.getNext()) {
        String var = key.getString();
        Node value = key.removeFirstChild();
        // TODO(user): Copy type information.
        nodes.add(
            IR.assign(
                IR.name(varmap.get(var)),
                value));
        all.remove(var);
      }

      // Keys not present in this literal are explicitly reset to undefined,
      // since the whole-object assignment would have dropped them.
      // TODO(user): Better source information.
      for (String var : all) {
        nodes.add(
            IR.assign(
                IR.name(varmap.get(var)),
                NodeUtil.newUndefinedNode(null)));
      }

      Node replacement;
      if (nodes.isEmpty()) {
        replacement = IR.trueNode();
      } else {
        // All assignments evaluate to true, so make sure that the
        // expr statement evaluates to true in case it matters.
        nodes.add(IR.trueNode());

        // Join these using COMMA. A COMMA node must have 2 children, so we
        // create a tree. In the tree the first child should be the COMMA to
        // match the parser, otherwise tree equality tests fail.
        nodes = Lists.reverse(nodes);
        replacement = new Node(Token.COMMA);
        Node cur = replacement;
        int i;
        for (i = 0; i < nodes.size() - 2; i++) {
          cur.addChildToFront(nodes.get(i));
          Node t = new Node(Token.COMMA);
          cur.addChildToFront(t);
          cur = t;
        }
        cur.addChildToFront(nodes.get(i));
        cur.addChildToFront(nodes.get(i + 1));
      }

      Node replace = ref.getParent();
      replacement.copyInformationFromForTree(replace);

      if (replace.isVar()) {
        // A var statement must stay a statement, so wrap in EXPR_RESULT.
        replace.getParent().replaceChild(
            replace, NodeUtil.newExpr(replacement));
      } else {
        replace.getParent().replaceChild(replace, replacement);
      }
    }

    /**
     * Splits up the object literal into individual variables, and
     * updates all uses.
     */
    private void splitObject(Var v, Reference init,
        ReferenceCollection referenceInfo) {
      // First figure out the FULL set of possible keys, so that they
      // can all be properly set as necessary.
      Map<String, String> varmap = computeVarList(referenceInfo);

      Map<String, Node> initvals = new HashMap<>();
      // Figure out the top-level of the var assign node. If it's a plain
      // ASSIGN, then there's an EXPR_STATEMENT above it, if it's a
      // VAR then it should be directly replaced.
      Node vnode;
      boolean defined = referenceInfo.isWellDefined()
          && init.getParent().isVar();
      if (defined) {
        vnode = init.getParent();
        fillInitialValues(init, initvals);
      } else {
        // TODO(user): More test / rewrite this part.
        // Find the beginning of the function / script.
        vnode = v.getScope().getRootNode().getLastChild().getFirstChild();
      }

      for (Map.Entry<String, String> entry : varmap.entrySet()) {
        Node val = initvals.get(entry.getKey());
        Node varnode = NodeUtil.newVarNode(entry.getValue(), val);
        if (val == null) {
          // NOTE(review): copying source info from vnode here looks
          // questionable ("is this right?" in the original) — confirm.
          varnode.copyInformationFromForTree(vnode);
        } else {
          blacklistVarReferencesInTree(val, v.scope);
        }
        vnode.getParent().addChildBefore(varnode, vnode);
        compiler.reportChangeToEnclosingScope(vnode);
      }

      if (defined) {
        vnode.getParent().removeChild(vnode);
      }

      for (Reference ref : referenceInfo.references) {
        compiler.reportChangeToEnclosingScope(ref.getNode());

        // The init/decl have already been converted.
        if (defined && ref == init) {
          continue;
        }

        if (ref.isLvalue()) {
          // Assignments have to be handled specially, since they
          // expand out into multiple assignments.
          replaceAssignmentExpression(v, ref, varmap);
        } else if (ref.getParent().isVar()) {
          // The old variable declaration. It didn't have a
          // value. Remove it entirely as it should now be unused.
          ref.getGrandparent().removeChild(ref.getParent());
        } else {
          // Make sure that the reference is a GETPROP as we expect it to be.
          Node getprop = ref.getParent();
          Preconditions.checkState(getprop.isGetProp());

          // The key being looked up in the original map.
          String var = getprop.getChildAtIndex(1).getString();

          // If the variable hasn't already been declared, add an empty
          // declaration near all the other declarations.
          Preconditions.checkState(varmap.containsKey(var));

          // Replace the GETPROP node with a NAME.
          Node replacement = IR.name(varmap.get(var));
          replacement.copyInformationFrom(getprop);
          ref.getGrandparent().replaceChild(ref.getParent(), replacement);
        }
      }
    }
  }
}
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <gunter.zeilinger@tiani.com> * Franz Willer <franz.willer@gwi-ag.com> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ package org.dcm4chex.wado.web; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; import javax.servlet.http.HttpServletRequest; import org.dcm4che.data.Dataset; import org.dcm4che.dict.Tags; import org.dcm4che.dict.UIDs; import org.dcm4cheri.util.StringUtils; import org.dcm4chex.wado.common.WADORequestObject; import org.dcm4chex.wado.mbean.WADOSupport; /** * @author franz.willer * * TODO To change the template for this generated type comment go to Window - * Preferences - Java - Code Style - Code Templates */ public class WADORequestObjectImpl extends BasicRequestObjectImpl implements WADORequestObject { private static final String ERROR_INVALID_REGION_FORMAT = "Error: region parameter is invalid! Must be a comma separated list of 4 decimal strings."; private static final String ERROR_INVALID_REGION_OUT_OF_RANGE = "Error: region parameter is invalid! Coordinates must be in range [0..1]."; private static final String ERROR_INVALID_REGION_DIMENSION = "Error: region parameter is invalid! Width and height of specified region must be > 0."; private static final String ERROR_NULL_WINDOW_WIDTH = "Error: windowWidth parameter is invalid! Must specify a value."; private static final String ERROR_NULL_WINDOW_CENTER = "Error: windowCenter parameter is invalid! Must specify a value."; private static final String ERROR_INVALID_WINDOW_WIDTH_TYPE = "Error: windowWidth parameter is invalid! Must be a decimal string."; private static final String ERROR_INVALID_WINDOW_CENTER_TYPE = "Error: windowCenter parameter is invalid! Must be a decimal string."; private static final String ERROR_INVALID_WINDOW_WIDTH_VALUE = "Error: windowWidth parameter is invalid! Width must be > 0."; private static final String ERROR_INVALID_IMAGE_QUALITY_TYPE = "Error: imageQuality parameter is invalid! 
Must be a integer string."; private static final String ERROR_INVALID_IMAGE_QUALITY_VALUE = "Error: imageQuality parameter is invalid! Quality must be in range [1..100]."; private String studyUID; private String seriesUID; private String instanceUID; private String rows; private String columns; private String frameNumber; private String transferSyntax; private String region; private String windowWidth; private String windowCenter; private String imageQuality; private List<String> contentTypes; private Dataset objectInfo; /** * Creates a WADORequestObjectImpl instance configured with http request. * * @param request * The http request. */ public WADORequestObjectImpl(HttpServletRequest request) { super(request); studyUID = request.getParameter("studyUID"); seriesUID = request.getParameter("seriesUID"); instanceUID = request.getParameter("objectUID"); // optional parameters - implemented String contentType = request.getParameter("contentType"); rows = request.getParameter("rows"); columns = request.getParameter("columns"); frameNumber = request.getParameter("frameNumber"); transferSyntax = request.getParameter("transferSyntax"); contentTypes = _string2List(contentType, ","); region = request.getParameter("region"); windowWidth = request.getParameter("windowWidth"); windowCenter = request.getParameter("windowCenter"); imageQuality = request.getParameter("imageQuality"); } /** * Returns the value of studyUID request parameter. * * @see org.dcm4chex.wado.common.WADORequestObject#getStudyUID() * * @return the studyUID. */ public String getStudyUID() { return objectInfo == null ? studyUID : objectInfo.getString(Tags.StudyInstanceUID); } /** * Returns the value of seriesUID request parameter. * * @see org.dcm4chex.wado.common.WADORequestObject#getSeriesUID() * * @return the seriesUID. */ public String getSeriesUID() { return objectInfo == null ? seriesUID : objectInfo.getString(Tags.SeriesInstanceUID); } /** * Returns the value of objectUID request parameter. 
* * @see org.dcm4chex.wado.common.WADORequestObject#getObjectUID() * * @return the objectUID */ public String getObjectUID() { return instanceUID; } /** * Returns the value of rows request parameter. * * @see org.dcm4chex.wado.common.WADORequestObject#getRows() * * @return the rows parameter (integer String) */ public String getRows() { return rows; } /** * Returns the value of columns request parameter. * * @see org.dcm4chex.wado.common.WADORequestObject#getColumns() * * @return the columns parameter (integer String) */ public String getColumns() { return columns; } /** * Returns the value of frameNumber request parameter. * * @see org.dcm4chex.wado.common.WADORequestObject#getFrameNumber() * * @return the frameNumber (integer String) */ public String getFrameNumber() { return frameNumber; } /** * Returns the list of requested content types from the contentType request * parameter. * <p> * The contentType param has one ore more content types seperated by ',' * character. * * @see org.dcm4chex.wado.common.WADORequestObject#getContentTypes() * * @return A list of requested content types */ public List<String> getContentTypes() { return contentTypes; } /** * Returns the transferSyntax parameter. * * @return Returns the transferSyntax. */ public String getTransferSyntax() { return transferSyntax; } /** * @return Returns the value of the region parameter. */ public String getRegion() { return region; } /** * @return Returns the value of the windowWidth parameter. */ public String getWindowWidth() { return windowWidth; } /** * @return Returns the value of the windowCenter parameter. */ public String getWindowCenter() { return windowCenter; } /** * @return Returns the value of the imageQuality parameter. */ public String getImageQuality() { return imageQuality; } /** * Checks this request object and returns an error code. 
* <p> * <DL> * <DT>Following checks:</DT> * <DD> requestType must be "WADO"</DD> * <DD> studyUID, seriesUID and objectUID must be set</DD> * <DD> if rows is set: check if it is parseable to int</DD> * <DD> if columns is set: check if it is parseable to int</DD> * <DD> if frameNumber is set: check if it is parseable to int</DD> * </DL> * * @return OK if it is a valid WADO request or an error code. */ public int checkRequest() { if (getRequestType() == null || !"WADO".equalsIgnoreCase(getRequestType()) || studyUID == null || seriesUID == null || instanceUID == null || instanceUID.trim().length() == 0 ) { setErrorMsg("Not a WADO URL!"); return INVALID_WADO_URL; } if (!checkUIDs()) { return INVALID_UID; } if (rows != null) { try { Integer.parseInt(rows); } catch (Exception x) { setErrorMsg("Error: rows parameter is invalid! Must be an integer string."); return INVALID_ROWS; } } if (columns != null) { try { Integer.parseInt(columns); } catch (Exception x) { setErrorMsg("Error: columns parameter is invalid! Must be an integer string."); return INVALID_COLUMNS; } } if (frameNumber != null) { try { Integer.parseInt(frameNumber); } catch (Exception x) { setErrorMsg("Error: frameNumber parameter is invalid! Must be an integer string."); return INVALID_FRAME_NUMBER; } } if (region != null) { try { checkRegion(region); } catch (IllegalArgumentException e) { setErrorMsg(e.getMessage()); return INVALID_REGION; } } if (windowWidth != null || windowCenter != null) { try { checkWindowLevel(windowWidth, windowCenter); } catch (Exception x) { setErrorMsg(x.getMessage()); return INVALID_WINDOW_LEVEL; } } if (imageQuality != null) { try { checkImageQuality(imageQuality); } catch (IllegalArgumentException e) { setErrorMsg(e.getMessage()); return INVALID_IMAGE_QUALITY; } } if (contentTypes != null) { for (String ct : contentTypes) { if (!WADOSupport.CONTENT_TYPES.contains(ct)) { setErrorMsg("Invalid contentType "+ct+"! 
Must be one of "+WADOSupport.CONTENT_TYPES); return INVALID_CONTENT_TYPE; } } } setErrorMsg(null); return OK; } /** * Checks that the region string's value is valid. Throws * <code>IllegalArgumentException</code> if it isn't. * * @param region * String representing a rectangular region of an image * * @return void * */ private void checkRegion(String region) { String[] ss = StringUtils.split(region, ','); if (ss.length != 4) { throw new IllegalArgumentException(ERROR_INVALID_REGION_FORMAT); } double[] ds = new double[4]; for (int i = 0; i < ds.length; i++) { try { ds[i] = Double.parseDouble(ss[i]); } catch (NumberFormatException e) { throw new IllegalArgumentException(ERROR_INVALID_REGION_FORMAT); } if (ds[i] < 0. || ds[i] > 1.) { throw new IllegalArgumentException( ERROR_INVALID_REGION_OUT_OF_RANGE); } } if (!(ds[0] < ds[2] && ds[1] < ds[3])) { throw new IllegalArgumentException(ERROR_INVALID_REGION_DIMENSION); } } /** * Checks that the windowWidth & windowCenter values are valid. Throws * <code>IllegalArgumentException</code> if either isn't. 
* * @param windowWidth * The value of the windowWidth WADO parameter * @param windowCenter * The value of the windowCenter WADO parameter * * @return void * */ private void checkWindowLevel(String windowWidth, String windowCenter) throws IllegalArgumentException { if (windowWidth == null) throw new IllegalArgumentException(ERROR_NULL_WINDOW_WIDTH); if (windowCenter == null) throw new IllegalArgumentException(ERROR_NULL_WINDOW_CENTER); double width = -1; try { width = Double.parseDouble(windowWidth); } catch (NumberFormatException e) { throw new IllegalArgumentException(ERROR_INVALID_WINDOW_WIDTH_TYPE); } if (width <= 0) throw new IllegalArgumentException(ERROR_INVALID_WINDOW_WIDTH_VALUE); try { Double.parseDouble(windowCenter); } catch (NumberFormatException e) { throw new IllegalArgumentException(ERROR_INVALID_WINDOW_CENTER_TYPE); } } private void checkImageQuality(String imageQuality) { int quality = -1; try { quality = Integer.parseInt(imageQuality); } catch (NumberFormatException e) { throw new IllegalArgumentException(ERROR_INVALID_IMAGE_QUALITY_TYPE); } if (quality <= 0 || quality > 100) throw new IllegalArgumentException( ERROR_INVALID_IMAGE_QUALITY_VALUE); } private boolean checkUIDs() { // studyUID and seriesUID are not used in query! So check UID only if length > 2 to allow short URLs if (studyUID.length() > 2 && !isUID(studyUID)) { this.setErrorMsg("Invalid studyUID parameter!"); return false; } if (seriesUID.length() > 2 && !isUID(seriesUID)) { this.setErrorMsg("Invalid seriesUID parameter!"); return false; } if (!isUID(instanceUID)) { this.setErrorMsg("Invalid objetcUID parameter!"); return false; } if (transferSyntax != null && !UIDs.isValid(transferSyntax)) { this.setErrorMsg("Invalid transferSyntax parameter! 
Not a valid UID"); return false; } return true; } public static boolean isUID(String uid) { if (uid == null || uid.length() < 3 || uid.charAt(0) == '.') return false; char[] a = uid.toCharArray(); boolean expectDigit = true; for (int i = 0; i < a.length; ++i) { if (a[i] == '.') { expectDigit = true; } else if (a[i] > '9' || a[i] < '0') { return false; } else { expectDigit = false; } } return !expectDigit; } /** * Seperate the given String with delim character and return a List of the * items. * * @param s * String with one or more items seperated with a character. * @param delim * The delimiter charecter. * @return A List with the seperated items */ private List<String> _string2List(String s, String delim) { if (s == null) return null; StringTokenizer st = new StringTokenizer(s, delim); List<String> l = new ArrayList<String>(); while (st.hasMoreTokens()) { l.add(st.nextToken().trim()); } return l; } /** * Returns a short description of this request. * <p> * * @return String representation of this request. */ public String toString() { StringBuffer sb = new StringBuffer(); sb.append("WADO request:"); Iterator iter = paramMap.keySet().iterator(); Object key; while (iter.hasNext()) { key = iter.next(); sb.append("&").append(key).append("=").append( ((String[]) paramMap.get(key))[0]); } return sb.toString(); } public boolean isExcludePrivate() { return "no".equalsIgnoreCase(request.getParameter("privateTags")); } public String getSimpleFrameList() { return request.getParameter("simpleFrameList"); } public String getCalculatedFrameList() { return request.getParameter("calculatedFrameList"); } public Dataset getObjectInfo() { return objectInfo; } public void setObjectInfo(Dataset objectInfo) { this.objectInfo = objectInfo; } }
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.model;

import com.facebook.buck.util.BuckConstant;
import com.facebook.buck.util.HumanReadableException;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Sets;

import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;

/**
 * Static helpers for working with build targets.
 */
public class BuildTargets {

  /** Utility class: do not instantiate. */
  private BuildTargets() {}

  /**
   * Shared implementation for the four path helpers below: builds
   * {@code <baseDir>/<basePathWithSlash><format % shortName>}.
   *
   * @param baseDir root output directory (scratch or gen).
   * @param basePathWithSlash the target's base path, ending with a slash.
   * @param shortName the target's short name, substituted into {@code format}.
   * @param format {@link String#format} string containing one "%s".
   */
  private static Path getDerivedPath(
      String baseDir,
      String basePathWithSlash,
      String shortName,
      String format) {
    return Paths.get(
        String.format("%s/%s" + format, baseDir, basePathWithSlash, shortName));
  }

  /**
   * Return a path to a file in the buck-out/bin/ directory. {@code format} will be prepended with
   * the {@link com.facebook.buck.util.BuckConstant#SCRATCH_DIR} and the target base path, then
   * formatted with the target short name.
   *
   * @param target The {@link BuildTarget} to scope this path to.
   * @param format {@link String#format} string for the path name.  It should contain one "%s",
   *     which will be filled in with the rule's short name.  It should not start with a slash.
   * @return A {@link java.nio.file.Path} under buck-out/bin, scoped to the base path of
   * {@code target}.
   */
  public static Path getScratchPath(BuildTarget target, String format) {
    return getDerivedPath(
        BuckConstant.SCRATCH_DIR,
        target.getBasePathWithSlash(),
        target.getShortNameAndFlavorPostfix(),
        format);
  }

  /**
   * Return a path to a file in the buck-out/bin/ directory. {@code format} will be prepended with
   * the {@link com.facebook.buck.util.BuckConstant#SCRATCH_DIR} and the target base path, then
   * formatted with the target short name.
   *
   * @param target The {@link UnflavoredBuildTarget} to scope this path to.
   * @param format {@link String#format} string for the path name.  It should contain one "%s",
   *     which will be filled in with the rule's short name.  It should not start with a slash.
   * @return A {@link java.nio.file.Path} under buck-out/bin, scoped to the base path of
   * {@code target}.
   */
  public static Path getScratchPath(UnflavoredBuildTarget target, String format) {
    return getDerivedPath(
        BuckConstant.SCRATCH_DIR,
        target.getBasePathWithSlash(),
        target.getShortName(),
        format);
  }

  /**
   * Return a path to a file in the buck-out/gen/ directory. {@code format} will be prepended with
   * the {@link com.facebook.buck.util.BuckConstant#GEN_DIR} and the target base path, then
   * formatted with the target short name.
   *
   * @param target The {@link BuildTarget} to scope this path to.
   * @param format {@link String#format} string for the path name.  It should contain one "%s",
   *     which will be filled in with the rule's short name.  It should not start with a slash.
   * @return A {@link java.nio.file.Path} under buck-out/gen, scoped to the base path of
   * {@code target}.
   */
  public static Path getGenPath(BuildTarget target, String format) {
    return getDerivedPath(
        BuckConstant.GEN_DIR,
        target.getBasePathWithSlash(),
        target.getShortNameAndFlavorPostfix(),
        format);
  }

  /**
   * Return a path to a file in the buck-out/gen/ directory. {@code format} will be prepended with
   * the {@link com.facebook.buck.util.BuckConstant#GEN_DIR} and the target base path, then
   * formatted with the target short name.
   *
   * @param target The {@link UnflavoredBuildTarget} to scope this path to.
   * @param format {@link String#format} string for the path name.  It should contain one "%s",
   *     which will be filled in with the rule's short name.  It should not start with a slash.
   * @return A {@link java.nio.file.Path} under buck-out/gen, scoped to the base path of
   * {@code target}.
   */
  public static Path getGenPath(UnflavoredBuildTarget target, String format) {
    return getDerivedPath(
        BuckConstant.GEN_DIR,
        target.getBasePathWithSlash(),
        target.getShortName(),
        format);
  }

  /**
   * Derives a flavored {@link BuildTarget} from the given unflavored target
   * and the specified flavor.
   */
  public static BuildTarget createFlavoredBuildTarget(
      UnflavoredBuildTarget buildTarget,
      Flavor flavor) {
    return BuildTarget.builder(buildTarget)
        .addFlavors(flavor)
        .build();
  }

  /**
   * Returns whether the {@link BuildTarget} `target` is visible to the {@link BuildTarget} `other`
   * using the given visibility patterns.
   */
  public static boolean isVisibleTo(
      BuildTarget target,
      ImmutableSet<BuildTargetPattern> visibilityPatterns,
      BuildTarget other) {

    // Targets in the same build file are always visible to each other.
    if (target.getBaseName().equals(other.getBaseName())) {
      return true;
    }

    for (BuildTargetPattern pattern : visibilityPatterns) {
      if (pattern.apply(other)) {
        return true;
      }
    }

    return false;
  }

  /**
   * Propagate flavors represented by the given {@link FlavorDomain} objects from a parent
   * target to its dependencies.
   *
   * @throws HumanReadableException if the parent is missing a flavor for some domain, or a
   *     dependency already carries a flavor for one of the domains.
   */
  public static ImmutableSortedSet<BuildTarget> propagateFlavorDomains(
      BuildTarget target,
      Iterable<FlavorDomain<?>> domains,
      Iterable<BuildTarget> deps) {

    Set<Flavor> flavors = Sets.newHashSet();

    // For each flavor domain, extract the corresponding flavor from the parent target and
    // verify that each dependency hasn't already set this flavor.
    for (FlavorDomain<?> domain : domains) {

      // Now extract all relevant domain flavors from our parent target.
      Optional<Flavor> flavor;
      try {
        flavor = domain.getFlavor(ImmutableSet.copyOf(target.getFlavors()));
      } catch (FlavorDomainException e) {
        throw new HumanReadableException("%s: %s", target, e.getMessage());
      }
      if (!flavor.isPresent()) {
        throw new HumanReadableException(
            "%s: no flavor for \"%s\"",
            target,
            domain.getName());
      }
      flavors.add(flavor.get());

      // First verify that our deps are not already flavored for our given domains.
      for (BuildTarget dep : deps) {
        Optional<Flavor> depFlavor;
        try {
          depFlavor = domain.getFlavor(ImmutableSet.copyOf(dep.getFlavors()));
        } catch (FlavorDomainException e) {
          throw new HumanReadableException("%s: dep %s: %s", target, dep, e.getMessage());
        }
        if (depFlavor.isPresent()) {
          throw new HumanReadableException(
              "%s: dep %s already has flavor for \"%s\" : %s",
              target,
              dep,
              domain.getName(),
              flavor.get());
        }
      }
    }

    ImmutableSortedSet.Builder<BuildTarget> flavoredDeps = ImmutableSortedSet.naturalOrder();

    // Now flavor each dependency with the relevant flavors.
    for (BuildTarget dep : deps) {
      flavoredDeps.add(BuildTarget.builder(dep).addAllFlavors(flavors).build());
    }

    return flavoredDeps.build();
  }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.nodetype.write; import static com.google.common.base.Preconditions.checkNotNull; import static org.apache.jackrabbit.JcrConstants.JCR_DEFAULTVALUES; import static org.apache.jackrabbit.JcrConstants.JCR_MULTIPLE; import static org.apache.jackrabbit.JcrConstants.JCR_REQUIREDTYPE; import static org.apache.jackrabbit.JcrConstants.JCR_VALUECONSTRAINTS; import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_AVAILABLE_QUERY_OPERATORS; import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_FULLTEXT_SEARCHABLE; import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.JCR_IS_QUERY_ORDERABLE; import static org.apache.jackrabbit.oak.plugins.nodetype.NodeTypeConstants.RESIDUAL_NAME; import java.util.Arrays; import java.util.Locale; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.nodetype.ConstraintViolationException; import javax.jcr.nodetype.PropertyDefinition; import javax.jcr.nodetype.PropertyDefinitionTemplate; import javax.jcr.query.qom.QueryObjectModelConstants; import javax.jcr.version.OnParentVersionAction; import 
org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.namepath.NameMapper;
import org.apache.jackrabbit.oak.plugins.memory.PropertyStates;

/**
 * Mutable template implementation of {@link PropertyDefinitionTemplate}.
 * Collects the attributes of a property definition (required type,
 * multi-value flag, query settings, value constraints and default values)
 * and knows how to persist them to an {@code nt:propertyDefinition} tree
 * when a node type is registered.
 */
class PropertyDefinitionTemplateImpl extends ItemDefinitionTemplate
        implements PropertyDefinitionTemplate {

    /**
     * All JCR-QOM comparison operators; used as the default for
     * {@link #getAvailableQueryOperators()}.
     */
    private static final String[] ALL_OPERATORS = {
        QueryObjectModelConstants.JCR_OPERATOR_EQUAL_TO,
        QueryObjectModelConstants.JCR_OPERATOR_GREATER_THAN,
        QueryObjectModelConstants.JCR_OPERATOR_GREATER_THAN_OR_EQUAL_TO,
        QueryObjectModelConstants.JCR_OPERATOR_LESS_THAN,
        QueryObjectModelConstants.JCR_OPERATOR_LESS_THAN_OR_EQUAL_TO,
        QueryObjectModelConstants.JCR_OPERATOR_LIKE,
        QueryObjectModelConstants.JCR_OPERATOR_NOT_EQUAL_TO
    };

    private int requiredType = PropertyType.STRING;

    private boolean isMultiple = false;

    private boolean fullTextSearchable = true;

    private boolean queryOrderable = true;

    private String[] queryOperators = ALL_OPERATORS;

    private String[] valueConstraints = null;

    private Value[] defaultValues = null;

    PropertyDefinitionTemplateImpl(NameMapper mapper) {
        super(mapper);
    }

    /**
     * Creates a template pre-populated from an existing definition.
     *
     * @param mapper name mapper for namespace translation
     * @param definition the definition to copy
     * @throws ConstraintViolationException if the definition is invalid
     */
    PropertyDefinitionTemplateImpl(
            NameMapper mapper, PropertyDefinition definition)
            throws ConstraintViolationException {
        super(mapper, definition);
        setRequiredType(definition.getRequiredType());
        setMultiple(definition.isMultiple());
        setFullTextSearchable(definition.isFullTextSearchable());
        setQueryOrderable(definition.isQueryOrderable());
        setAvailableQueryOperators(definition.getAvailableQueryOperators());
        setValueConstraints(definition.getValueConstraints());
        setDefaultValues(definition.getDefaultValues());
    }

    /**
     * Writes the contents of this property definition to the given tree node.
     * Used when registering new node types.
     *
     * @param tree an {@code nt:propertyDefinition} node
     * @throws RepositoryException if this definition could not be written
     */
    @Override
    void writeTo(Tree tree) throws RepositoryException {
        super.writeTo(tree);

        // JCR stores the required type name in upper case (e.g. "STRING")
        tree.setProperty(
                JCR_REQUIREDTYPE,
                PropertyType.nameFromValue(requiredType).toUpperCase(Locale.ENGLISH));
        tree.setProperty(JCR_MULTIPLE, isMultiple);
        tree.setProperty(JCR_IS_FULLTEXT_SEARCHABLE, fullTextSearchable);
        tree.setProperty(JCR_IS_QUERY_ORDERABLE, queryOrderable);

        tree.setProperty(
                JCR_AVAILABLE_QUERY_OPERATORS,
                Arrays.asList(queryOperators), Type.NAMES); // TODO: mapping?

        // Optional attributes: absent value means "remove any stale property"
        if (valueConstraints == null) {
            tree.removeProperty(JCR_VALUECONSTRAINTS);
        } else {
            tree.setProperty(
                    JCR_VALUECONSTRAINTS,
                    Arrays.asList(valueConstraints), Type.STRINGS);
        }

        if (defaultValues == null) {
            tree.removeProperty(JCR_DEFAULTVALUES);
        } else {
            tree.setProperty(PropertyStates.createProperty(
                    JCR_DEFAULTVALUES, Arrays.asList(defaultValues)));
        }
    }

    //------------------------------------------------------------< public >--

    @Override
    public int getRequiredType() {
        return requiredType;
    }

    @Override
    public void setRequiredType(int type) {
        PropertyType.nameFromValue(type); // validation: throws on unknown tag
        this.requiredType = type;
    }

    @Override
    public boolean isMultiple() {
        return isMultiple;
    }

    @Override
    public void setMultiple(boolean isMultiple) {
        this.isMultiple = isMultiple;
    }

    @Override
    public boolean isFullTextSearchable() {
        return fullTextSearchable;
    }

    @Override
    public void setFullTextSearchable(boolean fullTextSearchable) {
        this.fullTextSearchable = fullTextSearchable;
    }

    @Override
    public boolean isQueryOrderable() {
        return queryOrderable;
    }

    @Override
    public void setQueryOrderable(boolean queryOrderable) {
        this.queryOrderable = queryOrderable;
    }

    @Override
    public String[] getAvailableQueryOperators() {
        return queryOperators;
    }

    @Override
    public void setAvailableQueryOperators(String[] operators) {
        checkNotNull(operators);
        // defensive copy so later caller-side mutation has no effect
        this.queryOperators = operators.clone();
    }

    @Override
    public String[] getValueConstraints() {
        return valueConstraints; // no problem if modified by client
    }

    @Override
    public void setValueConstraints(String[] constraints) {
        // defensive copy; null clears the constraints
        this.valueConstraints =
                (constraints == null) ? null : constraints.clone();
    }

    @Override
    public Value[] getDefaultValues() {
        return defaultValues; // no problem if modified by client
    }

    @Override
    public void setDefaultValues(Value[] values) {
        // defensive copy; null clears the defaults
        this.defaultValues = (values == null) ? null : values.clone();
    }

    //------------------------------------------------------------< Object >--

    /**
     * Compact CND-like rendering, e.g. {@code - title (LONG) a p m VERSION}.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("- ");
        sb.append(getOakName() == null ? RESIDUAL_NAME : getOakName());
        if (requiredType != PropertyType.STRING) {
            sb.append(" (").append(Type.fromTag(requiredType, false).toString()).append(")");
        }
        if (isAutoCreated()) {
            sb.append(" a");
        }
        if (isProtected()) {
            sb.append(" p");
        }
        if (isMandatory()) {
            sb.append(" m");
        }
        int opv = getOnParentVersion();
        if (opv != OnParentVersionAction.COPY) {
            sb.append(' ').append(OnParentVersionAction.nameFromValue(opv));
        }
        return sb.toString();
    }

}
package com.uc4.ara.feature.rm;

import java.util.ArrayList;
import java.util.List;

import com.uc4.ara.feature.globalcodes.ErrorCodes;
import com.uc4.ara.feature.utils.CmdLineParser;
import com.uc4.importexportservice.ArrayOfString;
import com.uc4.importexportservice.structure.Entity;
import com.uc4.importexportservice.structure.MainType;
import com.uc4.importexportservice.structure.Property;
import com.uc4.importexportservice.structure.Sync;

/**
 * Assigns deployment targets to (or removes them from) the components of a
 * deployment profile. Targets are matched to components by custom type;
 * archived targets are never assigned. Prints the number of affected
 * relations as {@code UC4RB_OUT_ASSIGNED|n} or {@code UC4RB_OUT_REMOVED|n}.
 */
public class ChangeDeploymentProfileTarget extends
        AbstractDeploymentProfileFeature {

    private CmdLineParser.Option<String> deploymentTargetArg;
    private CmdLineParser.Option<String> componentArg;
    private CmdLineParser.Option<Boolean> deleteArg;

    /** Components of the application, filtered by --component when given. */
    private List<Component> cmpnts;
    /** Targets of the environment, filtered by --deploymentTarget when given. */
    private List<DeploymentTarget> deploymentTargets;

    /**
     * Builds an identity property used to address an entity in the
     * import/export service.
     *
     * @param name property name (e.g. {@code system_component.system_id})
     * @param value property value
     * @return a new identity {@link Property}
     */
    private Property createProperty(String name, String value) {
        Property p = new Property();
        p.setName(name);
        p.setValue(value);
        p.setIsIdentity(true);
        return p;
    }

    /**
     * Builds one DEPLOYMENT_PROFILE_TARGET entity for every non-archived
     * target whose custom type matches a component's custom type.
     *
     * @return a {@link Sync} holding the profile/application/component/target
     *         relation entities to import or delete
     * @throws Exception propagated from the web-service layer
     */
    private Sync buildEntitiesData() throws Exception {
        Sync sync = new Sync();
        for (Component cmpnt : cmpnts) {
            for (DeploymentTarget deploymentTarget : deploymentTargets) {
                // Skip archived targets; only type-compatible pairs relate.
                if (!deploymentTarget.isArchive()
                        && cmpnt.getCustomType().equalsIgnoreCase(
                                deploymentTarget.getCustomType())) {
                    Entity entity = new Entity();
                    entity.setMainType(MainType.DEPLOYMENT_PROFILE_TARGET);
                    sync.getEntity().add(entity);
                    entity.getProperty().add(
                            createProperty(
                                    "system_deployment_profile.system_id",
                                    profileValue));
                    entity.getProperty().add(
                            createProperty("system_application.system_id",
                                    appValue));
                    entity.getProperty().add(
                            createProperty("system_component.system_id",
                                    Long.toString(cmpnt.getSystemId())));
                    entity.getProperty().add(
                            createProperty(
                                    "system_deployment_target.system_id",
                                    Long.toString(deploymentTarget
                                            .getSystemId())));
                }
            }
        }
        return sync;
    }

    /**
     * Loads all deployment targets assigned to the environment, including
     * their archived flag and custom type.
     *
     * @return the environment's deployment targets
     * @throws Exception propagated from the web-service layer
     */
    private List<DeploymentTarget> getDeploymentProfileTargets()
            throws Exception {
        List<DeploymentTarget> deploymentTarget = new ArrayList<DeploymentTarget>();
        ArrayOfString conditions = new ArrayOfString();
        ArrayOfString properties = new ArrayOfString();

        // First query: target ids related to the environment.
        properties.getString().add("system_deployment_target.system_id");
        conditions.getString().add(
                "system_environment.system_id eq '" + environmentValue + "'");
        Sync sync = wsUtil.exportEntities(
                MainType.ENVIRONMENT_DEPLOYMENT_TARGET_RELATION, properties,
                conditions);

        List<String> target = new ArrayList<String>();
        for (Entity e : sync.getEntity()) {
            target.add(e.getProperty().get(0).getValue());
        }

        // Second query per target: fetch name and system_archived flag so
        // archived targets can be excluded later.
        for (String t : target) {
            conditions.getString().clear();
            properties.getString().clear();
            properties.getString().add("system_id");
            properties.getString().add("system_name");
            properties.getString().add("system_archived");
            conditions.getString().add("system_id eq '" + t + "'");
            sync = wsUtil.exportEntities(MainType.DEPLOYMENT_TARGET,
                    properties, conditions);

            Entity e = sync.getEntity().get(0);
            DeploymentTarget dtgt = new DeploymentTarget();
            dtgt.setSystemId(Long.parseLong(e.getProperty().get(0).getValue()));
            dtgt.setSystemName(e.getProperty().get(1).getValue());
            dtgt.setArchive(e.getProperty().get(2).getValue()
                    .equalsIgnoreCase("true"));
            dtgt.setCustomType(e.getCustomType());
            deploymentTarget.add(dtgt);
        }
        return deploymentTarget;
    }

    /**
     * Loads all components of the application identified by {@code appValue}.
     *
     * @return the application's components
     * @throws Exception propagated from the web-service layer
     */
    private List<Component> getDeploymentProfileComponents() throws Exception {
        ArrayOfString conditions = new ArrayOfString();
        ArrayOfString properties = new ArrayOfString();
        properties.getString().add("system_id");
        properties.getString().add("system_name");
        conditions.getString().add(
                "system_application.system_id eq '" + appValue + "'");
        Sync sync = wsUtil.exportEntities(MainType.COMPONENT, properties,
                conditions);

        List<Component> cmpnt = new ArrayList<Component>();
        for (Entity e : sync.getEntity()) {
            Component c = new Component();
            c.setSystemId(Long.parseLong(e.getProperty().get(0).getValue()));
            c.setSystemName(e.getProperty().get(1).getValue());
            c.setCustomType(e.getCustomType());
            cmpnt.add(c);
        }
        return cmpnt;
    }

    /**
     * Entry point: validates the optional target/component filters, computes
     * the relation entities and either imports or deletes them.
     *
     * @param args raw command-line arguments
     * @return {@link ErrorCodes#OK} on success
     * @throws IllegalArgumentException if a given target or component does
     *         not exist, or the target is archived
     * @throws Exception propagated from the web-service layer
     */
    @Override
    public int run(String[] args) throws Exception {
        super.run(args);
        String cmpntValue = parser.getOptionValue(componentArg);
        String deploymentTargetValue = parser
                .getOptionValue(deploymentTargetArg);
        // Flag option: present means "delete mode".
        boolean isDelete = parser.getOptionValue(deleteArg) != null;

        cmpnts = getDeploymentProfileComponents();
        deploymentTargets = getDeploymentProfileTargets();

        // Validate the deployment target filter: accepts name or numeric id.
        if ((deploymentTargetValue != null)
                && (!deploymentTargetValue.isEmpty())) {
            DeploymentTarget t = null;
            for (DeploymentTarget deploymentTarget : deploymentTargets) {
                if (deploymentTargetValue.equalsIgnoreCase(deploymentTarget
                        .getSystemName())
                        || (deploymentTargetValue.equalsIgnoreCase(Long
                                .toString(deploymentTarget.getSystemId())))) {
                    t = deploymentTarget;
                    break;
                }
            }
            if (t == null) {
                throw new IllegalArgumentException("The deployment target "
                        + deploymentTargetValue
                        + " wasn't assigned to environment " + environmentValue);
            }
            if (t.isArchive()) {
                throw new IllegalArgumentException("The deployment target "
                        + deploymentTargetValue + " was archived");
            }
            deploymentTargets.clear();
            deploymentTargets.add(t);
        }

        // Validate the component filter: comma-separated names or ids.
        if ((cmpntValue != null) && (!cmpntValue.isEmpty())) {
            List<Component> lcmpnt = new ArrayList<Component>();
            for (String rawName : cmpntValue.split(",")) {
                String name = rawName.trim();
                boolean found = false;
                for (Component cmpnt : cmpnts) {
                    if (name.equalsIgnoreCase(cmpnt.getSystemName())
                            || name.equalsIgnoreCase(Long.toString(cmpnt
                                    .getSystemId()))) {
                        found = true;
                        lcmpnt.add(cmpnt);
                        break;
                    }
                }
                if (!found) {
                    throw new IllegalArgumentException("The component " + name
                            + " doesn't belong to the application " + appValue);
                }
            }
            cmpnts = lcmpnt;
        }

        Sync sync = buildEntitiesData();
        Sync currentSync = backupDeploymentProfileTarget();

        if (isDelete) {
            // Only delete relations that actually exist on the server.
            sync.getEntity().retainAll(currentSync.getEntity());
            int size = sync.getEntity().size();
            System.out.println("UC4RB_OUT_REMOVED|" + size);
            if (size > 0) {
                wsUtil.deleteEntities(sync);
            }
        } else {
            // Only import relations that are not present yet.
            sync.getEntity().removeAll(currentSync.getEntity());
            int size = sync.getEntity().size();
            System.out.println("UC4RB_OUT_ASSIGNED|" + size);
            if (size > 0) {
                wsUtil.importEntities(sync, MainType.DEPLOYMENT_PROFILE_TARGET);
            }
        }
        return ErrorCodes.OK;
    }

    /** Registers the feature-specific command-line options. */
    @Override
    public void initialize() {
        super.initialize();
        deploymentTargetArg = parser
                .addHelp(
                        parser.addStringOption("tgt", "deploymentTarget",
                                false),
                        "The name or ID of the target to be removed. If empty all targets will be assigned/removed for the given component.");
        componentArg = parser
                .addHelp(
                        parser.addStringOption("cmpnt", "component", false),
                        "Comma separated list of component names, if empty auto assignment/removment is done for all components");
        deleteArg = parser
                .addHelp(
                        parser.addBooleanOption("d", "delete", false),
                        "If specified, the function will remove targets from components else it will add");
    }
}
/* * Copyright 2012-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.health; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; import org.assertj.core.api.ThrowableAssert.ThrowingCallable; import reactor.core.publisher.Mono; import org.springframework.boot.actuate.endpoint.ApiVersion; import org.springframework.boot.actuate.endpoint.web.test.WebEndpointTest; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication; import org.springframework.boot.autoconfigure.condition.ConditionalOnWebApplication.Type; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.test.web.reactive.server.WebTestClient; import org.springframework.util.ReflectionUtils; /** * Integration tests for {@link HealthEndpoint} and {@link HealthEndpointWebExtension} * exposed by Jersey, Spring MVC, and WebFlux. 
 *
 * @author Andy Wilkinson
 * @author Phillip Webb
 */
class HealthEndpointWebIntegrationTests {

	// Media types for the versioned actuator JSON formats (V2 uses "details",
	// V3 uses "components" in the response body).
	private static final String V2_JSON = ApiVersion.V2.getProducedMimeType().toString();

	private static final String V3_JSON = ApiVersion.V3.getProducedMimeType().toString();

	@WebEndpointTest
	void whenHealthIsUp200ResponseIsReturned(WebTestClient client) {
		client.get().uri("/actuator/health").accept(MediaType.APPLICATION_JSON).exchange().expectStatus().isOk()
				.expectBody().jsonPath("status").isEqualTo("UP").jsonPath("components.alpha.status").isEqualTo("UP")
				.jsonPath("components.bravo.status").isEqualTo("UP");
	}

	@WebEndpointTest
	void whenHealthIsUpAndAcceptsV3Request200ResponseIsReturned(WebTestClient client) {
		client.get().uri("/actuator/health").headers((headers) -> headers.set(HttpHeaders.ACCEPT, V3_JSON)).exchange()
				.expectStatus().isOk().expectBody().jsonPath("status").isEqualTo("UP")
				.jsonPath("components.alpha.status").isEqualTo("UP").jsonPath("components.bravo.status")
				.isEqualTo("UP");
	}

	@WebEndpointTest
	void whenHealthIsUpAndAcceptsAllRequest200ResponseIsReturned(WebTestClient client) {
		// A wildcard Accept header should resolve to the latest (V3) format.
		client.get().uri("/actuator/health").headers((headers) -> headers.set(HttpHeaders.ACCEPT, "*/*")).exchange()
				.expectStatus().isOk().expectBody().jsonPath("status").isEqualTo("UP")
				.jsonPath("components.alpha.status").isEqualTo("UP").jsonPath("components.bravo.status")
				.isEqualTo("UP");
	}

	@WebEndpointTest
	void whenHealthIsUpAndV2Request200ResponseIsReturnedInV2Format(WebTestClient client) {
		// V2 responses nest contributors under "details" rather than "components".
		client.get().uri("/actuator/health").headers((headers) -> headers.set(HttpHeaders.ACCEPT, V2_JSON)).exchange()
				.expectStatus().isOk().expectBody().jsonPath("status").isEqualTo("UP").jsonPath("details.alpha.status")
				.isEqualTo("UP").jsonPath("details.bravo.status").isEqualTo("UP");
	}

	@WebEndpointTest
	void whenHealthIsDown503ResponseIsReturned(ApplicationContext context, WebTestClient client) {
		// Register a temporary DOWN contributor so the aggregate goes DOWN.
		HealthIndicator healthIndicator = () -> Health.down().build();
		ReactiveHealthIndicator reactiveHealthIndicator = () -> Mono.just(Health.down().build());
		withHealthContributor(context, "charlie", healthIndicator, reactiveHealthIndicator,
				() -> client.get().uri("/actuator/health").accept(MediaType.APPLICATION_JSON).exchange().expectStatus()
						.isEqualTo(HttpStatus.SERVICE_UNAVAILABLE).expectBody().jsonPath("status").isEqualTo("DOWN")
						.jsonPath("components.alpha.status").isEqualTo("UP").jsonPath("components.bravo.status")
						.isEqualTo("UP").jsonPath("components.charlie.status").isEqualTo("DOWN"));
	}

	@WebEndpointTest
	void whenComponentHealthIsDown503ResponseIsReturned(ApplicationContext context, WebTestClient client) {
		HealthIndicator healthIndicator = () -> Health.down().build();
		ReactiveHealthIndicator reactiveHealthIndicator = () -> Mono.just(Health.down().build());
		withHealthContributor(context, "charlie", healthIndicator, reactiveHealthIndicator,
				() -> client.get().uri("/actuator/health/charlie").accept(MediaType.APPLICATION_JSON).exchange()
						.expectStatus().isEqualTo(HttpStatus.SERVICE_UNAVAILABLE).expectBody().jsonPath("status")
						.isEqualTo("DOWN"));
	}

	@WebEndpointTest
	void whenComponentInstanceHealthIsDown503ResponseIsReturned(ApplicationContext context, WebTestClient client) {
		// Nested path /charlie/one addresses an indicator inside a composite.
		HealthIndicator healthIndicator = () -> Health.down().build();
		CompositeHealthContributor composite = CompositeHealthContributor
				.fromMap(Collections.singletonMap("one", healthIndicator));
		ReactiveHealthIndicator reactiveHealthIndicator = () -> Mono.just(Health.down().build());
		CompositeReactiveHealthContributor reactiveComposite = CompositeReactiveHealthContributor
				.fromMap(Collections.singletonMap("one", reactiveHealthIndicator));
		withHealthContributor(context, "charlie", composite, reactiveComposite,
				() -> client.get().uri("/actuator/health/charlie/one").accept(MediaType.APPLICATION_JSON).exchange()
						.expectStatus().isEqualTo(HttpStatus.SERVICE_UNAVAILABLE).expectBody().jsonPath("status")
						.isEqualTo("DOWN"));
	}

	/**
	 * Registers the given contributor (and, when a reactive registry is present,
	 * its reactive counterpart) under {@code name}, runs {@code callable}, and
	 * always unregisters both again so other tests see an unchanged registry.
	 */
	private void withHealthContributor(ApplicationContext context, String name, HealthContributor healthContributor,
			ReactiveHealthContributor reactiveHealthContributor, ThrowingCallable callable) {
		HealthContributorRegistry healthContributorRegistry = getContributorRegistry(context,
				HealthContributorRegistry.class);
		healthContributorRegistry.registerContributor(name, healthContributor);
		ReactiveHealthContributorRegistry reactiveHealthContributorRegistry = getContributorRegistry(context,
				ReactiveHealthContributorRegistry.class);
		if (reactiveHealthContributorRegistry != null) {
			reactiveHealthContributorRegistry.registerContributor(name, reactiveHealthContributor);
		}
		try {
			callable.call();
		}
		catch (Throwable ex) {
			ReflectionUtils.rethrowRuntimeException(ex);
		}
		finally {
			healthContributorRegistry.unregisterContributor(name);
			if (reactiveHealthContributorRegistry != null) {
				reactiveHealthContributorRegistry.unregisterContributor(name);
			}
		}
	}

	// Returns null when the registry type is not defined in the context
	// (e.g. no reactive registry in a servlet web application).
	private <R extends ContributorRegistry<?>> R getContributorRegistry(ApplicationContext context,
			Class<R> registryType) {
		return context.getBeanProvider(registryType).getIfAvailable();
	}

	@WebEndpointTest
	void whenHealthIndicatorIsRemovedResponseIsAltered(WebTestClient client, ApplicationContext context) {
		String name = "bravo";
		HealthContributorRegistry healthContributorRegistry = getContributorRegistry(context,
				HealthContributorRegistry.class);
		HealthContributor bravo = healthContributorRegistry.unregisterContributor(name);
		ReactiveHealthContributorRegistry reactiveHealthContributorRegistry = getContributorRegistry(context,
				ReactiveHealthContributorRegistry.class);
		ReactiveHealthContributor reactiveBravo = (reactiveHealthContributorRegistry != null)
				? reactiveHealthContributorRegistry.unregisterContributor(name) : null;
		try {
			client.get().uri("/actuator/health").accept(MediaType.APPLICATION_JSON).exchange().expectStatus().isOk()
					.expectBody().jsonPath("status").isEqualTo("UP").jsonPath("components.alpha.status").isEqualTo("UP")
					.jsonPath("components.bravo.status").doesNotExist();
		}
		finally {
			// Restore the removed contributor(s) for subsequent tests.
			healthContributorRegistry.registerContributor(name, bravo);
			if (reactiveHealthContributorRegistry != null && reactiveBravo != null) {
				reactiveHealthContributorRegistry.registerContributor(name, reactiveBravo);
			}
		}
	}

	/**
	 * Shared test fixture: two always-UP indicators ("alpha", "bravo"), the
	 * health endpoint, and its servlet/reactive web extensions as applicable.
	 */
	@Configuration(proxyBeanMethods = false)
	static class TestConfiguration {

		@Bean
		HealthContributorRegistry healthContributorRegistry(Map<String, HealthContributor> healthContributorBeans) {
			return new DefaultHealthContributorRegistry(healthContributorBeans);
		}

		@Bean
		@ConditionalOnWebApplication(type = Type.REACTIVE)
		ReactiveHealthContributorRegistry reactiveHealthContributorRegistry(
				Map<String, HealthContributor> healthContributorBeans,
				Map<String, ReactiveHealthContributor> reactiveHealthContributorBeans) {
			// Blocking contributors are adapted so the reactive registry sees
			// every contributor, preferring natively reactive ones on clash.
			Map<String, ReactiveHealthContributor> allIndicators = new LinkedHashMap<>(reactiveHealthContributorBeans);
			healthContributorBeans.forEach((name, contributor) -> allIndicators.computeIfAbsent(name,
					(key) -> ReactiveHealthContributor.adapt(contributor)));
			return new DefaultReactiveHealthContributorRegistry(allIndicators);
		}

		@Bean
		HealthEndpoint healthEndpoint(HealthContributorRegistry healthContributorRegistry,
				HealthEndpointGroups healthEndpointGroups) {
			return new HealthEndpoint(healthContributorRegistry, healthEndpointGroups);
		}

		@Bean
		@ConditionalOnWebApplication(type = Type.SERVLET)
		HealthEndpointWebExtension healthWebEndpointExtension(HealthContributorRegistry healthContributorRegistry,
				HealthEndpointGroups healthEndpointGroups) {
			return new HealthEndpointWebExtension(healthContributorRegistry, healthEndpointGroups);
		}

		@Bean
		@ConditionalOnWebApplication(type = Type.REACTIVE)
		ReactiveHealthEndpointWebExtension reactiveHealthWebEndpointExtension(
				ReactiveHealthContributorRegistry reactiveHealthContributorRegistry,
				HealthEndpointGroups healthEndpointGroups) {
			return new ReactiveHealthEndpointWebExtension(reactiveHealthContributorRegistry, healthEndpointGroups);
		}

		@Bean
		HealthEndpointGroups healthEndpointGroups() {
			// Primary group plus an "alltheas" group containing only
			// contributors whose name starts with "a".
			TestHealthEndpointGroup primary = new TestHealthEndpointGroup();
			TestHealthEndpointGroup allTheAs = new TestHealthEndpointGroup((name) -> name.startsWith("a"));
			return HealthEndpointGroups.of(primary, Collections.singletonMap("alltheas", allTheAs));
		}

		@Bean
		HealthIndicator alphaHealthIndicator() {
			return () -> Health.up().build();
		}

		@Bean
		HealthIndicator bravoHealthIndicator() {
			return () -> Health.up().build();
		}

	}

}
/** * Copyright (C) 2004-2009 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.session; import java.net.UnknownHostException; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.jivesoftware.openfire.Connection; import org.jivesoftware.openfire.SessionManager; import org.jivesoftware.openfire.StreamID; import org.jivesoftware.openfire.auth.UnauthorizedException; import org.jivesoftware.openfire.interceptor.InterceptorManager; import org.jivesoftware.openfire.interceptor.PacketRejectedException; import org.jivesoftware.util.LocaleUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmpp.packet.JID; import org.xmpp.packet.Packet; /** * The session represents a connection between the server and a client (c2s) or * another server (s2s) as well as a connection with a component. Authentication and * user accounts are associated with c2s connections while s2s has an optional authentication * association but no single user user.<p> * * Obtain object managers from the session in order to access server resources. * * @author Gaston Dombiak */ public abstract class LocalSession implements Session { private static final Logger Log = LoggerFactory.getLogger(LocalSession.class); /** * The utf-8 charset for decoding and encoding Jabber packet streams. */ protected static String CHARSET = "UTF-8"; /** * The Address this session is authenticated as. 
 */
    protected JID address;

    /**
     * The stream id for this session (random and unique).
     */
    private StreamID streamID;

    /**
     * The current session status; starts as {@link #STATUS_CONNECTED}.
     */
    protected int status = STATUS_CONNECTED;

    /**
     * The connection that this session represents.
     */
    protected Connection conn;

    protected SessionManager sessionManager;

    private String serverName;

    // Creation time of the session, in milliseconds since the epoch.
    private long startDate = System.currentTimeMillis();

    // Last time a packet was sent or received on this session; updated by
    // incrementClientPacketCount()/incrementServerPacketCount().
    protected long lastActiveDate;

    protected long clientPacketCount = 0;

    protected long serverPacketCount = 0;

    /**
     * Session temporary data. All data stored in this <code>Map</code> disappears when the session
     * finishes. Access is synchronized on the map itself.
     */
    private final Map<String, Object> sessionData = new HashMap<String, Object>();

    /**
     * Creates a session with an underlying connection and permission protection.
     *
     * @param serverName domain of the XMPP server where the new session belongs.
     * @param connection The connection we are proxying.
     * @param streamID unique identifier for this session.
     */
    public LocalSession(String serverName, Connection connection, StreamID streamID) {
        conn = connection;
        this.streamID = streamID;
        this.serverName = serverName;
        String id = streamID.getID();
        // The initial JID uses the stream id as the resource until the
        // session is authenticated and bound to a real address.
        this.address = new JID(null, serverName, id, true);
        this.sessionManager = SessionManager.getInstance();
    }

    /**
     * Obtain the address of the user. The address is used by services like the core
     * server packet router to determine if a packet should be sent to the handler.
     * Handlers that are working on behalf of the server should use the generic server
     * hostname address (e.g. server.com).
     *
     * @return the address of the packet handler.
     */
    public JID getAddress() {
        return address;
    }

    /**
     * Sets the new address of this session. The address is used by services like the core
     * server packet router to determine if a packet should be sent to the handler.
     * Handlers that are working on behalf of the server should use the generic server
     * hostname address (e.g. server.com).
     *
     * @param address the new address of this session.
     */
    public void setAddress(JID address){
        this.address = address;
    }

    /**
     * Returns the connection associated with this Session.
     *
     * @return The connection for this session
     */
    public Connection getConnection() {
        return conn;
    }

    /**
     * Obtain the current status of this session.
     *
     * @return The status code for this session
     */
    public int getStatus() {
        return status;
    }

    /**
     * Set the new status of this session. Setting a status may trigger
     * certain events to occur (setting a closed status will close this
     * session).
     *
     * @param status The new status code for this session
     */
    public void setStatus(int status) {
        this.status = status;
    }

    /**
     * Obtain the stream ID associated with this session. Stream ID's are generated by the server
     * and should be unique and random.
     *
     * @return This session's assigned stream ID
     */
    public StreamID getStreamID() {
        return streamID;
    }

    /**
     * Obtain the name of the server this session belongs to.
     *
     * @return the server name.
     */
    public String getServerName() {
        return serverName;
    }

    /**
     * Obtain the date the session was created.
     *
     * @return the session's creation date.
     */
    public Date getCreationDate() {
        return new Date(startDate);
    }

    /**
     * Obtain the time the session last had activity.
     *
     * @return The last time the session received activity.
     */
    public Date getLastActiveDate() {
        return new Date(lastActiveDate);
    }

    /**
     * Increments the number of packets sent from the client to the server.
     */
    public void incrementClientPacketCount() {
        clientPacketCount++;
        lastActiveDate = System.currentTimeMillis();
    }

    /**
     * Increments the number of packets sent from the server to the client.
     */
    public void incrementServerPacketCount() {
        serverPacketCount++;
        lastActiveDate = System.currentTimeMillis();
    }

    /**
     * Obtain the number of packets sent from the client to the server.
     *
     * @return The number of packets sent from the client to the server.
     */
    public long getNumClientPackets() {
        return clientPacketCount;
    }

    /**
     * Obtain the number of packets sent from the server to the client.
     *
     * @return The number of packets sent from the server to the client.
     */
    public long getNumServerPackets() {
        return serverPacketCount;
    }

    /**
     * Saves given session data. Data are saved to temporary storage only and are accessible during
     * this session life only and only from this session instance.
     *
     * @param key a <code>String</code> value of stored data key ID.
     * @param value a <code>Object</code> value of data stored in session.
     * @see #getSessionData(String)
     */
    public void setSessionData(String key, Object value) {
        synchronized (sessionData) {
            sessionData.put(key, value);
        }
    }

    /**
     * Retrieves session data. This method gives access to temporary session data only. You can
     * retrieve earlier saved data giving key ID to receive needed value. Please see
     * {@link #setSessionData(String, Object)} description for more details.
     *
     * @param key a <code>String</code> value of stored data ID.
     * @return a <code>Object</code> value of data for given key.
     * @see #setSessionData(String, Object)
     */
    public Object getSessionData(String key) {
        synchronized (sessionData) {
            return sessionData.get(key);
        }
    }

    /**
     * Removes session data. Please see {@link #setSessionData(String, Object)} description
     * for more details.
     *
     * @param key a <code>String</code> value of stored data ID.
     * @see #setSessionData(String, Object)
     */
    public void removeSessionData(String key) {
        synchronized (sessionData) {
            sessionData.remove(key);
        }
    }

    /**
     * Delivers the packet through this session, running pre- and post-delivery
     * interceptors around the actual delivery. Packets that fail the
     * {@link #canProcess(Packet)} check are silently dropped.
     *
     * @param packet the packet to process.
     */
    public void process(Packet packet) {
        // Check that the requested packet can be processed
        if (canProcess(packet)) {
            // Perform the actual processing of the packet. This usually implies sending
            // the packet to the entity
            try {
                // Invoke the interceptors before we send the packet
                InterceptorManager.getInstance().invokeInterceptors(packet, this, false, false);
                deliver(packet);
                // Invoke the interceptors after we have sent the packet
                InterceptorManager.getInstance().invokeInterceptors(packet, this, false, true);
            }
            catch (PacketRejectedException e) {
                // An interceptor rejected the packet so do nothing
            }
            catch (Exception e) {
                Log.error(LocaleUtils.getLocalizedString("admin.error"), e);
            }
        }
    }

    /**
     * Returns true if the specified packet can be delivered to the entity. Subclasses will use different
     * criteria to determine if processing is allowed or not. For instance, client sessions will use
     * privacy lists while outgoing server sessions will always allow this action.
     *
     * @param packet the packet to analyze if it must be blocked.
     * @return true if the specified packet must be blocked.
     */
    abstract boolean canProcess(Packet packet);

    // Performs the actual delivery of the packet to the remote entity.
    abstract void deliver(Packet packet) throws UnauthorizedException;

    /**
     * Sends raw text over the underlying connection; a no-op when no
     * connection is attached.
     *
     * @param text the raw text to send.
     */
    public void deliverRawText(String text) {
        if (conn != null) {
            conn.deliverRawText(text);
        }
    }

    /**
     * Returns a text with the available stream features. Each subclass may return different
     * values depending whether the session has been authenticated or not.
     *
     * @return a text with the available stream features or <tt>null</tt> to add nothing.
     */
    public abstract String getAvailableStreamFeatures();

    // Closes the underlying connection (if any); closing the connection is
    // what ultimately ends this session.
    public void close() {
        if (conn != null) {
            conn.close();
        }
    }

    public boolean validate() {
        return conn.validate();
    }

    public boolean isSecure() {
        return conn.isSecure();
    }

    public boolean isClosed() {
        return conn.isClosed();
    }

    public String getHostAddress() throws UnknownHostException {
        return conn.getHostAddress();
    }

    public String getHostName() throws UnknownHostException {
        return conn.getHostName();
    }

    @Override
    public String toString() {
        return super.toString() + " status: " + status + " address: " + address + " id: " + streamID;
    }

    /**
     * Parses a version string into {major, minor}.
     * Assumes "major.minor[.anything]" format; throws NumberFormatException
     * if the first two dot-separated parts are not integers.
     */
    protected static int[] decodeVersion(String version) {
        int[] answer = new int[] {0 , 0};
        String [] versionString = version.split("\\.");
        answer[0] = Integer.parseInt(versionString[0]);
        answer[1] = Integer.parseInt(versionString[1]);
        return answer;
    }

    /**
     * Returns true if the other peer of this session presented a self-signed certificate. When
     * using self-signed certificate for server-2-server sessions then SASL EXTERNAL will not be
     * used and instead server-dialback will be preferred for verifying the identity of the remote
     * server.
     *
     * @return true if the other peer of this session presented a self-signed certificate.
     */
    public boolean isUsingSelfSignedCertificate() {
        return conn.isUsingSelfSignedCertificate();
    }

    // The three setters below restore counters/timestamps, e.g. when session
    // state is transferred — NOTE(review): caller not visible here; confirm.
    public void setClientPacketCount(long clientPacketCount){
        this.clientPacketCount = clientPacketCount;
    }

    public void setServerPacketCount(long serverPacketCount){
        this.serverPacketCount = serverPacketCount;
    }

    public void setLastActiveDate(long lastActiveDate){
        this.lastActiveDate = lastActiveDate;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/* $Id$ */

package org.apache.xmlgraphics.util;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.IOUtils;

/**
 * This class handles looking up service providers on the class path.
 * It implements the system described in:
 *
 * <a href='http://docs.oracle.com/javase/6/docs/technotes/guides/jar/jar.html#Service%20Provider'>JAR
 * File Specification Under Service Provider</a>. Note that this
 * interface is very similar to the one they describe which seems to
 * be missing in the JDK.
 *
 * @version $Id$
 *
 * Originally authored by Thomas DeWeese.
 */
public final class Service {

    private Service() {
    }

    // Caches of providers we have looked up before, keyed by service file name
    // ("META-INF/services/<fqcn>"). classMap caches provider class names,
    // instanceMap caches instantiated providers. Access is guarded by the
    // synchronized public lookup methods below.
    static Map<String, List<String>> classMap = new java.util.HashMap<String, List<String>>();
    static Map<String, List<Object>> instanceMap = new java.util.HashMap<String, List<Object>>();

    /**
     * Returns an iterator where each element should implement the
     * interface (or subclass the baseclass) described by cls.  The
     * Classes are found by searching the classpath for service files
     * named: 'META-INF/services/&lt;fully qualified classname&gt;' that list
     * fully qualified classnames of classes that implement the
     * service files classes interface. These classes must have
     * default constructors.
     *
     * @param cls The class/interface to search for providers of.
     */
    public static synchronized Iterator<Object> providers(Class<?> cls) {
        String serviceFile = getServiceFilename(cls);

        // Return the cached instances, if we resolved this service before.
        List<Object> l = instanceMap.get(serviceFile);
        if (l != null) {
            return l.iterator();
        }

        // Register the (still empty) list in the cache before populating it,
        // so a reentrant lookup of the same service sees the in-progress list
        // rather than recursing.
        l = new java.util.ArrayList<Object>();
        instanceMap.put(serviceFile, l);

        ClassLoader cl = getClassLoader(cls);
        if (cl != null) {
            List<String> names = getProviderNames(cls, cl);
            for (String name : names) {
                try {
                    // Try and load the class
                    Object obj = cl.loadClass(name).getDeclaredConstructor().newInstance();
                    // stick it into our vector...
                    l.add(obj);
                } catch (Exception ex) {
                    // Just try the next name; a provider that fails to load
                    // or instantiate is silently skipped by design.
                }
            }
        }
        return l.iterator();
    }

    /**
     * Returns an iterator where each element should be the name
     * of a class that implements the
     * interface (or subclass the baseclass) described by cls. The
     * Classes are found by searching the classpath for service files
     * named: 'META-INF/services/&lt;fully qualified classname&gt;' that list
     * fully qualified classnames of classes that implement the
     * service files classes interface.
     *
     * @param cls The class/interface to search for providers of.
     */
    public static synchronized Iterator<String> providerNames(Class<?> cls) {
        String serviceFile = getServiceFilename(cls);

        // Return the cached names, if we resolved this service before.
        List<String> l = classMap.get(serviceFile);
        if (l != null) {
            return l.iterator();
        }

        // Cache the list before filling it (see providers() above).
        l = new java.util.ArrayList<String>();
        classMap.put(serviceFile, l);
        l.addAll(getProviderNames(cls));
        return l.iterator();
    }

    /**
     * Returns an iterator where each element should implement the
     * interface (or subclass the baseclass) described by cls.  The
     * Classes are found by searching the classpath for service files
     * named: 'META-INF/services/&lt;fully qualified classname&gt;' that list
     * fully qualified classnames of classes that implement the
     * service files classes interface. These classes must have
     * default constructors if returnInstances is true.
     *
     * This is a deprecated, type-unsafe legacy method.
     *
     * @param cls The class/interface to search for providers of.
     * @param returnInstances true if the iterator should return instances rather than class names.
     * @deprecated use the type-safe methods providers(Class) or providerNames(Class) instead.
     */
    public static Iterator<?> providers(Class<?> cls, boolean returnInstances) {
        return (returnInstances ? providers(cls) : providerNames(cls));
    }

    /** Resolves provider names for cls using cls's own class loader. */
    private static List<String> getProviderNames(Class<?> cls) {
        return getProviderNames(cls, getClassLoader(cls));
    }

    /**
     * Reads every 'META-INF/services/&lt;cls&gt;' resource visible to cl and
     * collects the provider class names listed inside, stripping '#' comments
     * and blank lines. Returns an empty list when no class loader or no
     * resources are available; unreadable resources are skipped.
     */
    private static List<String> getProviderNames(Class<?> cls, ClassLoader cl) {
        List<String> l = new java.util.ArrayList<String>();

        // No class loader so we can't find 'serviceFile'.
        if (cl == null) {
            return l;
        }

        Enumeration<URL> e;
        try {
            e = cl.getResources(getServiceFilename(cls));
        } catch (IOException ioe) {
            return l;
        }

        while (e.hasMoreElements()) {
            try {
                URL u = e.nextElement();

                InputStream is = u.openStream();
                Reader r = new InputStreamReader(is, "UTF-8");
                BufferedReader br = new BufferedReader(r);
                try {
                    for (String line = br.readLine(); line != null; line = br.readLine()) {
                        // First strip any comment...
                        int idx = line.indexOf('#');
                        if (idx != -1) {
                            line = line.substring(0, idx);
                        }

                        // Trim whitespace.
                        line = line.trim();

                        if (line.length() != 0) {
                            l.add(line);
                        }
                    }
                } finally {
                    IOUtils.closeQuietly(br);
                    IOUtils.closeQuietly(is);
                }
            } catch (Exception ex) {
                // Just try the next file...
            }
        }
        return l;
    }

    /**
     * Returns a usable class loader for cls, falling back to this class's
     * loader and then the system class loader when cls's own loader is
     * unavailable (e.g. because of a SecurityException or a bootstrap class).
     */
    private static ClassLoader getClassLoader(Class<?> cls) {
        ClassLoader cl = null;
        try {
            cl = cls.getClassLoader();
        } catch (SecurityException se) {
            // Oops! can't get its class loader.
        }
        // Can always request your own class loader. But it might be 'null'.
        if (cl == null) {
            cl = Service.class.getClassLoader();
        }
        if (cl == null) {
            cl = ClassLoader.getSystemClassLoader();
        }
        return cl;
    }

    /** Builds the service resource path for cls: "META-INF/services/&lt;fqcn&gt;". */
    private static String getServiceFilename(Class<?> cls) {
        return "META-INF/services/" + cls.getName();
    }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.util;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableDescriptors;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Tests for {@link FSTableDescriptors}.
 */
// Do not support to be executed in the same JVM as other tests
@Category({MiscTests.class, MediumTests.class})
public class TestFSTableDescriptors {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestFSTableDescriptors.class);

  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
  private static final Logger LOG = LoggerFactory.getLogger(TestFSTableDescriptors.class);

  // Provides the current test method name, used to build per-test directories.
  @Rule
  public TestName name = new TestName();

  /** A tableinfo file name with no sequence-id suffix must be rejected. */
  @Test (expected=IllegalArgumentException.class)
  public void testRegexAgainstOldStyleTableInfo() {
    Path p = new Path("/tmp", FSTableDescriptors.TABLEINFO_FILE_PREFIX);
    int i = FSTableDescriptors.getTableInfoSequenceId(p);
    assertEquals(0, i);
    // Assert it won't eat garbage -- that it fails
    p = new Path("/tmp", "abc");
    FSTableDescriptors.getTableInfoSequenceId(p);
  }

  /** Create then repeatedly update a descriptor; only one tableinfo file may remain. */
  @Test
  public void testCreateAndUpdate() throws IOException {
    Path testdir = UTIL.getDataTestDir(name.getMethodName());
    TableDescriptor htd =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
    // First create succeeds, second is a no-op because the descriptor already exists.
    assertTrue(fstd.createTableDescriptor(htd));
    assertFalse(fstd.createTableDescriptor(htd));
    FileStatus [] statuses = fs.listStatus(testdir);
    assertTrue("statuses.length="+statuses.length, statuses.length == 1);
    for (int i = 0; i < 10; i++) {
      fstd.updateTableDescriptor(htd);
    }
    // Updates replace the tableinfo file in place; no extra files accumulate.
    statuses = fs.listStatus(testdir);
    assertTrue(statuses.length == 1);
    Path tmpTableDir = new Path(FSUtils.getTableDir(testdir, htd.getTableName()), ".tmp");
    statuses = fs.listStatus(tmpTableDir);
    assertTrue(statuses.length == 0);
  }

  /** Each update must bump the tableinfo sequence id by one and delete the prior file. */
  @Test
  public void testSequenceIdAdvancesOnTableInfo() throws IOException {
    Path testdir = UTIL.getDataTestDir(name.getMethodName());
    TableDescriptor htd =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
    Path p0 = fstd.updateTableDescriptor(htd);
    int i0 = FSTableDescriptors.getTableInfoSequenceId(p0);
    Path p1 = fstd.updateTableDescriptor(htd);
    // Assert we cleaned up the old file.
    assertTrue(!fs.exists(p0));
    int i1 = FSTableDescriptors.getTableInfoSequenceId(p1);
    assertTrue(i1 == i0 + 1);
    Path p2 = fstd.updateTableDescriptor(htd);
    // Assert we cleaned up the old file.
    assertTrue(!fs.exists(p1));
    int i2 = FSTableDescriptors.getTableInfoSequenceId(p2);
    assertTrue(i2 == i1 + 1);
    Path p3 = fstd.updateTableDescriptor(htd);
    // Assert we cleaned up the old file.
    assertTrue(!fs.exists(p2));
    int i3 = FSTableDescriptors.getTableInfoSequenceId(p3);
    assertTrue(i3 == i2 + 1);
    TableDescriptor descriptor = fstd.get(htd.getTableName());
    assertEquals(descriptor, htd);
  }

  /**
   * Sequence ids round-trip through the file name, and the tableinfo file-status
   * comparator orders files from the highest sequence id to the lowest.
   */
  @Test
  public void testFormatTableInfoSequenceId() {
    Path p0 = assertWriteAndReadSequenceId(0);
    // Assert p0 has format we expect: the prefix plus a zero-padded sequence id.
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < FSTableDescriptors.WIDTH_OF_SEQUENCE_ID; i++) {
      sb.append("0");
    }
    assertEquals(FSTableDescriptors.TABLEINFO_FILE_PREFIX + "." + sb.toString(),
      p0.getName());
    // Check a few more.
    Path p2 = assertWriteAndReadSequenceId(2);
    Path p10000 = assertWriteAndReadSequenceId(10000);
    // Get a tableinfo file name that has no sequenceid suffix.
    Path p = new Path(p0.getParent(), FSTableDescriptors.TABLEINFO_FILE_PREFIX);
    FileStatus fs = new FileStatus(0, false, 0, 0, 0, p);
    FileStatus fs0 = new FileStatus(0, false, 0, 0, 0, p0);
    FileStatus fs2 = new FileStatus(0, false, 0, 0, 0, p2);
    FileStatus fs10000 = new FileStatus(0, false, 0, 0, 0, p10000);
    Comparator<FileStatus> comparator = FSTableDescriptors.TABLEINFO_FILESTATUS_COMPARATOR;
    assertTrue(comparator.compare(fs, fs0) > 0);
    assertTrue(comparator.compare(fs0, fs2) > 0);
    assertTrue(comparator.compare(fs2, fs10000) > 0);
  }

  /** Writes sequence id i into a tableinfo file name and asserts it parses back to i. */
  private Path assertWriteAndReadSequenceId(final int i) {
    Path p = new Path("/tmp", FSTableDescriptors.getTableInfoFileName(i));
    int ii = FSTableDescriptors.getTableInfoSequenceId(p);
    assertEquals(i, ii);
    return p;
  }

  /** remove() returns the removed descriptor once, then null. */
  @Test
  public void testRemoves() throws IOException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any detritus laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
    htds.update(htd);
    assertNotNull(htds.remove(htd.getTableName()));
    assertNull(htds.remove(htd.getTableName()));
  }

  /** A descriptor written to the filesystem can be read back equal. */
  @Test
  public void testReadingHTDFromFS() throws IOException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
    Path rootdir = UTIL.getDataTestDir(name);
    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
    fstd.createTableDescriptor(htd);
    TableDescriptor td2 =
      FSTableDescriptors.getTableDescriptorFromFs(fs, rootdir, htd.getTableName());
    assertTrue(htd.equals(td2));
  }

  /**
   * Overwrites the tableinfo file with raw serialized bytes and verifies the
   * descriptor still deserializes correctly, both via FSTableDescriptors and
   * by parsing the file content directly.
   */
  @Test
  public void testReadingOldHTDFromFS() throws IOException, DeserializationException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    Path rootdir = UTIL.getDataTestDir(name);
    FSTableDescriptors fstd = new FSTableDescriptors(fs, rootdir);
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
    Path descriptorFile = fstd.updateTableDescriptor(htd);
    try (FSDataOutputStream out = fs.create(descriptorFile, true)) {
      out.write(TableDescriptorBuilder.toByteArray(htd));
    }
    FSTableDescriptors fstd2 = new FSTableDescriptors(fs, rootdir);
    TableDescriptor td2 = fstd2.get(htd.getTableName());
    assertEquals(htd, td2);
    FileStatus descriptorFile2 =
        FSTableDescriptors.getTableInfoPath(fs, fstd2.getTableDir(htd.getTableName()));
    byte[] buffer = TableDescriptorBuilder.toByteArray(htd);
    try (FSDataInputStream in = fs.open(descriptorFile2.getPath())) {
      in.readFully(buffer);
    }
    TableDescriptor td3 = TableDescriptorBuilder.parseFrom(buffer);
    assertEquals(htd, td3);
  }

  /**
   * Exercises the descriptor cache: repeated get() calls after create/update
   * should be served from cache (cachehits >= 2 * count) while total
   * invocations are still counted.
   */
  @Test
  public void testTableDescriptors() throws IOException, InterruptedException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any debris laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    FSTableDescriptors htds = new FSTableDescriptors(fs, rootdir) {
      @Override
      public TableDescriptor get(TableName tablename)
          throws TableExistsException, FileNotFoundException, IOException {
        LOG.info(tablename + ", cachehits=" + this.cachehits);
        return super.get(tablename);
      }
    };
    final int count = 10;
    // Write out table infos.
    for (int i = 0; i < count; i++) {
      htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)).build());
    }

    for (int i = 0; i < count; i++) {
      assertTrue(htds.get(TableName.valueOf(name + i)) != null);
    }
    for (int i = 0; i < count; i++) {
      assertTrue(htds.get(TableName.valueOf(name + i)) != null);
    }
    // Update the table infos
    for (int i = 0; i < count; i++) {
      TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i));
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i));
      htds.updateTableDescriptor(builder.build());
    }
    // Wait a while so mod time we write is for sure different.
    Thread.sleep(100);
    for (int i = 0; i < count; i++) {
      assertTrue(htds.get(TableName.valueOf(name + i)) != null);
    }
    for (int i = 0; i < count; i++) {
      assertTrue(htds.get(TableName.valueOf(name + i)) != null);
    }
    assertEquals(count * 4, htds.invocations);
    assertTrue("expected=" + (count * 2) + ", actual=" + htds.cachehits,
      htds.cachehits >= (count * 2));
  }

  /** With caching disabled, no get() may ever be a cache hit. */
  @Test
  public void testTableDescriptorsNoCache()
    throws IOException, InterruptedException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any debris laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    FSTableDescriptors htds = new FSTableDescriptorsTest(fs, rootdir, false);
    final int count = 10;
    // Write out table infos.
    for (int i = 0; i < count; i++) {
      htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)).build());
    }

    for (int i = 0; i < 2 * count; i++) {
      assertNotNull("Expected HTD, got null instead",
        htds.get(TableName.valueOf(name + i % 2)));
    }
    // Update the table infos
    for (int i = 0; i < count; i++) {
      TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i));
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of("" + i));
      htds.updateTableDescriptor(builder.build());
    }
    for (int i = 0; i < count; i++) {
      assertNotNull("Expected HTD, got null instead",
        htds.get(TableName.valueOf(name + i)));
      assertTrue("Column Family " + i + " missing",
        htds.get(TableName.valueOf(name + i)).hasColumnFamily(Bytes.toBytes("" + i)));
    }
    assertEquals(count * 4, htds.invocations);
    assertEquals("expected=0, actual=" + htds.cachehits, 0, htds.cachehits);
  }

  /** getAll() must return every created table plus hbase:meta. */
  @Test
  public void testGetAll()
    throws IOException, InterruptedException {
    final String name = "testGetAll";
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any debris laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    FSTableDescriptors htds = new FSTableDescriptorsTest(fs, rootdir);
    final int count = 4;
    // Write out table infos.
    for (int i = 0; i < count; i++) {
      htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)).build());
    }
    // add hbase:meta
    htds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.META_TABLE_NAME).build());
    assertEquals("getAll() didn't return all TableDescriptors, expected: " +
                   (count + 1) + " got: " + htds.getAll().size(),
                 count + 1, htds.getAll().size());
  }

  /** getAll() must return tables sorted by fully-qualified (namespace:table) name. */
  @Test
  public void testGetAllOrdering() throws Exception {
    final String name = "testGetAllOrdering";
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    Path rootDir = new Path(UTIL.getDataTestDir(), name);
    FSTableDescriptors tds = new FSTableDescriptorsTest(fs, rootDir);

    String[] tableNames = new String[] { "foo", "bar", "foo:bar", "bar:foo" };
    for (String tableName : tableNames) {
      tds.createTableDescriptor(
        TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName)).build());
    }

    Map<String, TableDescriptor> tables = tds.getAll();
    // Remove hbase:meta from list. It shows up now since we made it dynamic. The schema
    // is written into the fs by the FSTableDescriptors constructor now where before it
    // didn't.
    tables.remove(TableName.META_TABLE_NAME.getNameAsString());

    assertEquals(4, tables.size());

    String[] tableNamesOrdered =
      new String[] { "bar:foo", "default:bar", "default:foo", "foo:bar" };
    int i = 0;
    for (Map.Entry<String, TableDescriptor> entry : tables.entrySet()) {
      assertEquals(tableNamesOrdered[i], entry.getKey());
      assertEquals(tableNamesOrdered[i],
          entry.getValue().getTableName().getNameWithNamespaceInclAsString());
      i++;
    }
  }

  /**
   * A cached and a non-cached FSTableDescriptors over the same root dir must
   * agree on getAll(); a table created only through the non-cached instance
   * shows up there but not in the cached one until it is re-read.
   */
  @Test
  public void testCacheConsistency()
    throws IOException, InterruptedException {
    final String name = this.name.getMethodName();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any debris laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    FSTableDescriptors chtds = new FSTableDescriptorsTest(fs, rootdir);
    FSTableDescriptors nonchtds = new FSTableDescriptorsTest(fs, rootdir, false);

    final int count = 10;
    // Write out table infos via non-cached FSTableDescriptors
    for (int i = 0; i < count; i++) {
      nonchtds.createTableDescriptor(TableDescriptorBuilder.newBuilder(TableName.valueOf(name + i)).build());
    }

    // Calls to getAll() won't increase the cache counter, do per table.
    for (int i = 0; i < count; i++) {
      assertTrue(chtds.get(TableName.valueOf(name + i)) != null);
    }

    assertTrue(nonchtds.getAll().size() == chtds.getAll().size());

    // add a new entry for random table name.
    TableName random = TableName.valueOf("random");
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(random).build();
    nonchtds.createTableDescriptor(htd);

    // random will only increase the cachehit by 1
    assertEquals(nonchtds.getAll().size(), chtds.getAll().size() + 1);

    for (Map.Entry<String, TableDescriptor> entry: nonchtds.getAll().entrySet()) {
      String t = (String) entry.getKey();
      TableDescriptor nchtd = entry.getValue();
      assertTrue("expected " + htd.toString() +
                   " got: " + chtds.get(TableName.valueOf(t)).toString(),
                 (nchtd.equals(chtds.get(TableName.valueOf(t)))));
    }
  }

  /** Looking up a table that was never created must return null, not throw. */
  @Test
  public void testNoSuchTable() throws IOException {
    final String name = "testNoSuchTable";
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any detritus laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
    assertNull("There shouldn't be any HTD for this table",
      htds.get(TableName.valueOf("NoSuchTable")));
  }

  /** update() must be safely repeatable for the same descriptor. */
  @Test
  public void testUpdates() throws IOException {
    final String name = "testUpdates";
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    // Cleanup old tests if any detritus laying around.
    Path rootdir = new Path(UTIL.getDataTestDir(), name);
    TableDescriptors htds = new FSTableDescriptors(fs, rootdir);
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(name)).build();
    htds.update(htd);
    htds.update(htd);
    htds.update(htd);
  }

  /**
   * The tableinfo comparator must yield the same total order regardless of
   * input order: highest sequence id first, suffix-less name last.
   * NOTE(review): the "tablinfo" literals below (missing 'e') do not match
   * TABLEINFO_FILE_PREFIX; the comparator is evidently keyed on the numeric
   * suffix only — confirm before "fixing" these strings.
   */
  @Test
  public void testTableInfoFileStatusComparator() {
    FileStatus bare =
      new FileStatus(0, false, 0, 0, -1,
        new Path("/tmp", FSTableDescriptors.TABLEINFO_FILE_PREFIX));
    FileStatus future =
      new FileStatus(0, false, 0, 0, -1,
        new Path("/tmp/tablinfo." + System.currentTimeMillis()));
    FileStatus farFuture =
      new FileStatus(0, false, 0, 0, -1,
        new Path("/tmp/tablinfo." + System.currentTimeMillis() + 1000));
    FileStatus [] alist = {bare, future, farFuture};
    FileStatus [] blist = {bare, farFuture, future};
    FileStatus [] clist = {farFuture, bare, future};
    Comparator<FileStatus> c = FSTableDescriptors.TABLEINFO_FILESTATUS_COMPARATOR;
    Arrays.sort(alist, c);
    Arrays.sort(blist, c);
    Arrays.sort(clist, c);
    // Now assert all sorted same in way we want.
    for (int i = 0; i < alist.length; i++) {
      assertTrue(alist[i].equals(blist[i]));
      assertTrue(blist[i].equals(clist[i]));
      assertTrue(clist[i].equals(i == 0? farFuture: i == 1? future: bare));
    }
  }

  /** Reading a descriptor from a non-table directory (the temp dir) must fail. */
  @Test
  public void testReadingInvalidDirectoryFromFS() throws IOException {
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    try {
      new FSTableDescriptors(fs, FSUtils.getRootDir(UTIL.getConfiguration()))
          .get(TableName.valueOf(HConstants.HBASE_TEMP_DIRECTORY));
      fail("Shouldn't be able to read a table descriptor for the archive directory.");
    } catch (Exception e) {
      LOG.debug("Correctly got error when reading a table descriptor from the archive directory: "
          + e.getMessage());
    }
  }

  /** createTableDescriptor() with a modified descriptor must re-create (overwrite) it. */
  @Test
  public void testCreateTableDescriptorUpdatesIfExistsAlready() throws IOException {
    Path testdir = UTIL.getDataTestDir(name.getMethodName());
    TableDescriptor htd =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName())).build();
    FileSystem fs = FileSystem.get(UTIL.getConfiguration());
    FSTableDescriptors fstd = new FSTableDescriptors(fs, testdir);
    assertTrue(fstd.createTableDescriptor(htd));
    assertFalse(fstd.createTableDescriptor(htd));
    htd = TableDescriptorBuilder.newBuilder(htd)
            .setValue(Bytes.toBytes("mykey"), Bytes.toBytes("myValue"))
            .build();
    assertTrue(fstd.createTableDescriptor(htd)); //this will re-create
    Path tableDir = fstd.getTableDir(htd.getTableName());
    Path tmpTableDir = new Path(tableDir, FSTableDescriptors.TMP_DIR);
    FileStatus[] statuses = fs.listStatus(tmpTableDir);
    assertTrue(statuses.length == 0);

    assertEquals(htd, FSTableDescriptors.getTableDescriptorFromFs(fs, tableDir));
  }

  /**
   * Test double of FSTableDescriptors that logs every get() along with the
   * cache-hit counter, with caching configurable per instance.
   */
  private static class FSTableDescriptorsTest extends FSTableDescriptors {

    public FSTableDescriptorsTest(FileSystem fs, Path rootdir) {
      this(fs, rootdir, true);
    }

    public FSTableDescriptorsTest(FileSystem fs, Path rootdir, boolean usecache) {
      super(fs, rootdir, false, usecache);
    }

    @Override
    public TableDescriptor get(TableName tablename)
      throws TableExistsException, FileNotFoundException, IOException {
      LOG.info((super.isUsecache() ? "Cached" : "Non-Cached") +
                 " TableDescriptor.get() on " + tablename + ", cachehits=" + this.cachehits);
      return super.get(tablename);
    }
  }
}
package cz.metacentrum.perun.core.api; import cz.metacentrum.perun.core.api.exceptions.ActionTypeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.AlreadyAdminException; import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException; import cz.metacentrum.perun.core.api.exceptions.FacilityNotExistsException; import cz.metacentrum.perun.core.api.exceptions.GroupNotAdminException; import cz.metacentrum.perun.core.api.exceptions.GroupNotExistsException; import java.util.List; import cz.metacentrum.perun.core.api.exceptions.InternalErrorException; import cz.metacentrum.perun.core.api.exceptions.PerunBeanNotSupportedException; import cz.metacentrum.perun.core.api.exceptions.PrivilegeException; import cz.metacentrum.perun.core.api.exceptions.RoleNotSupportedException; import cz.metacentrum.perun.core.api.exceptions.UserNotAdminException; import cz.metacentrum.perun.core.api.exceptions.UserNotExistsException; import cz.metacentrum.perun.core.api.exceptions.VoNotExistsException; import cz.metacentrum.perun.core.bl.PerunBl; import cz.metacentrum.perun.core.impl.Utils; public class AuthzResolver { /** * Checks if the principal is authorized. * * @param sess perunSession * @param role required role * @param complementaryObject object which specifies particular action of the role (e.g. group) * @return true if the principal authorized, false otherwise * @throws InternalErrorException if something goes wrong */ public static boolean isAuthorized(PerunSession sess, Role role, PerunBean complementaryObject) throws InternalErrorException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isAuthorized(sess, role, complementaryObject); } /** * Checks if the principal is authorized to do some "action" on "attribute". * - for "primary" holder * - or "primary and secondary" holder if secondary holder is not null. * * @param sess perun session * @param actionType type of action on attribute (ex.: write, read, etc...) 
* @param attrDef attribute what principal want to work with * @param primaryHolder primary Bean of Attribute (can't be null) * @param secondaryHolder secondary Bean of Attribute (can be null) * @return true if principal is authorized, false if not * @throws InternalErrorException */ public static boolean isAuthorizedForAttribute(PerunSession sess, ActionType actionType, AttributeDefinition attrDef, Object primaryHolder, Object secondaryHolder) throws InternalErrorException { try { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isAuthorizedForAttribute(sess, actionType, attrDef, primaryHolder, secondaryHolder); } catch (AttributeNotExistsException ex) { throw new InternalErrorException(ex); } catch (ActionTypeNotExistsException ex) { throw new InternalErrorException(ex); } } /** * Return list of roles which are authorized for doing "action" on "attribute". * * @param sess perun session * @param actionType type of action on attribute (ex.: write, read, etc...) * @param attrDef attribute what principal want to work with * @return list of roles * @throws InternalErrorException * @throws AttributeNotExistsException * @throws ActionTypeNotExistsException */ public static List<Role> getRolesWhichCanWorkWithAttribute(PerunSession sess, ActionType actionType, AttributeDefinition attrDef) throws InternalErrorException, AttributeNotExistsException, ActionTypeNotExistsException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getRolesWhichCanWorkWithAttribute(sess, actionType, attrDef); } /** * Checks if the principal is authorized. 
* * @param sess perun session * @param role required role * * @return true if the principal authorized, false otherwise * @throws InternalErrorException if something goes wrong */ public static boolean isAuthorized(PerunSession sess, Role role) throws InternalErrorException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isAuthorized(sess, role); } /** * Returns true if the perun principal inside the perun session is vo admin. * * @param sess perun session * @return true if the perun principal is vo admin */ public static boolean isVoAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isVoAdmin(sess); } /** * Returns true if the perun principal inside the perun session is group admin. * * @param sess perun session * @return true if the perun principal is group admin. */ public static boolean isGroupAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isGroupAdmin(sess); } /** * Returns true if the perun principal inside the perun session is facility admin. * * @param sess perun session * @return true if the perun principal is facility admin. */ public static boolean isFacilityAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isFacilityAdmin(sess); } /** * Returns true if the perun principal inside the perun session is resource admin. * * @param sess perun session * @return true if the perun principal is resource admin. */ public static boolean isResourceAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isResourceAdmin(sess); } /** * Returns true if the perun principal inside the perun session is security admin. * * @param sess perun session * @return true if the perun principal is security admin. 
*/ public static boolean isSecurityAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isSecurityAdmin(sess); } /** * Returns true if the perun principal inside the perun session is vo observer. * * @param sess perun session * @return true if the perun principal is vo observer */ public static boolean isVoObserver(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isVoObserver(sess); } /** * Returns true if the perun principal inside the perun session is top group creator. * * @param sess perun session * @return true if the perun principal is top group creator. */ public static boolean isTopGroupCreator(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isTopGroupCreator(sess); } /** * Returns true if the perun principal inside the perun session is perun admin. * * @param sess perun session * @return true if the perun principal is perun admin. */ public static boolean isPerunAdmin(PerunSession sess) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.isPerunAdmin(sess); } /** * Get all principal role names. Role is defined as a name, translation table is in Role class. * * @param sess perun session * @throws InternalErrorException * @return list of integers, which represents role from enum Role. */ public static List<String> getPrincipalRoleNames(PerunSession sess) throws InternalErrorException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getPrincipalRoleNames(sess); } /** * Returns user which is associated with credentials used to log-in to Perun. * * @param sess perun session * @return currently logged user * @throws UserNotExistsException * @throws InternalErrorException */ public static User getLoggedUser(PerunSession sess) throws UserNotExistsException, InternalErrorException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getLoggedUser(sess); } /** * Returns true if the perunPrincipal has requested role. 
* * @param perunPrincipal acting person for whom the role is checked * @param role role to be checked */ public static boolean hasRole(PerunPrincipal perunPrincipal, Role role) { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.hasRole(perunPrincipal, role); } /** * Set role for user and <b>all</b> complementary objects. * * If some complementary object is wrong for the role, throw an exception. * For role "perunadmin" ignore complementary objects. * * @param sess perun session * @param user the user for setting role * @param role role of user in a session * @param complementaryObjects objects for which role will be set * * @throws InternalErrorException * @throws PrivilegeException * @throws UserNotExistsException * @throws AlreadyAdminException * @throws GroupNotAdminException * @throws UserNotAdminException */ public static void setRole(PerunSession sess, User user, Role role, List<PerunBean> complementaryObjects) throws InternalErrorException, PrivilegeException, UserNotExistsException, AlreadyAdminException, GroupNotAdminException, UserNotAdminException { Utils.notNull(role, "role"); ((PerunBl) sess.getPerun()).getUsersManagerBl().checkUserExists(sess, user); if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole."); cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.setRole(sess, user, role, complementaryObjects); } /** * Set role for user and <b>one</b> complementary object. * * If complementary object is wrong for the role, throw an exception. * For role "perunadmin" ignore complementary object. 
* * @param sess perun session * @param user the user for setting role * @param role role of user in a session * @param complementaryObject object for which role will be set * * @throws InternalErrorException * @throws PrivilegeException * @throws UserNotExistsException * @throws AlreadyAdminException */ public static void setRole(PerunSession sess, User user, PerunBean complementaryObject, Role role) throws InternalErrorException, PrivilegeException, UserNotExistsException, AlreadyAdminException { Utils.notNull(role, "role"); ((PerunBl) sess.getPerun()).getUsersManagerBl().checkUserExists(sess, user); if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole."); cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.setRole(sess, user,complementaryObject, role); } /** * Set role for auhtorizedGroup and <b>all</b> complementary objects. * * If some complementary object is wrong for the role, throw an exception. * For role "perunadmin" ignore complementary objects. 
* * @param sess perun session * @param authorizedGroup the group for setting role * @param role role of user in a session * @param complementaryObjects objects for which role will be set * * @throws InternalErrorException * @throws PrivilegeException * @throws GroupNotExistsException * @throws AlreadyAdminException */ public static void setRole(PerunSession sess, Group authorizedGroup, Role role, List<PerunBean> complementaryObjects) throws InternalErrorException, PrivilegeException, GroupNotExistsException, AlreadyAdminException { Utils.notNull(role, "role"); ((PerunBl) sess.getPerun()).getGroupsManagerBl().checkGroupExists(sess, authorizedGroup); if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole."); cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.setRole(sess, authorizedGroup, role, complementaryObjects); } /** * Set role for authorizedGroup and <b>one</b> complementary object. * * If complementary object is wrong for the role, throw an exception. * For role "perunadmin" ignore complementary object. 
 *
 * @param sess perun session
 * @param authorizedGroup the group for setting role
 * @param role role of user in a session
 * @param complementaryObject object for which role will be set
 *
 * @throws InternalErrorException
 * @throws PrivilegeException when the caller is not PERUNADMIN
 * @throws GroupNotExistsException when the authorized group does not exist
 * @throws AlreadyAdminException
 */
public static void setRole(PerunSession sess, Group authorizedGroup, PerunBean complementaryObject, Role role) throws InternalErrorException, PrivilegeException, GroupNotExistsException, AlreadyAdminException {
	Utils.notNull(role, "role");
	((PerunBl) sess.getPerun()).getGroupsManagerBl().checkGroupExists(sess, authorizedGroup);
	// Only perun admin may grant roles.
	if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole.");
	cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.setRole(sess, authorizedGroup, complementaryObject, role);
}

/**
 * Unset role for user and <b>all</b> complementary objects.
 *
 * If some complementary object is wrong for the role, throw an exception.
 * For role "perunadmin" ignore complementary objects.
 *
 * @param sess perun session
 * @param user the user for unsetting role
 * @param role role of user in a session
 * @param complementaryObjects objects for which role will be unset
 *
 * @throws InternalErrorException
 * @throws PrivilegeException when the caller is not PERUNADMIN
 * @throws UserNotExistsException when the user does not exist
 * @throws UserNotAdminException
 */
public static void unsetRole(PerunSession sess, User user, Role role, List<PerunBean> complementaryObjects) throws InternalErrorException, PrivilegeException, UserNotExistsException, UserNotAdminException {
	Utils.notNull(role, "role");
	((PerunBl) sess.getPerun()).getUsersManagerBl().checkUserExists(sess, user);
	// Only perun admin may revoke roles.
	if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method unsetRole.");
	cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.unsetRole(sess, user, role, complementaryObjects);
}

/**
 * Unset role for user and <b>one</b> complementary object.
 *
 * If complementary object is wrong for the role, throw an exception.
 * For role "perunadmin" ignore complementary object.
 *
 * @param sess perun session
 * @param user the user for unsetting role
 * @param role role of user in a session
 * @param complementaryObject object for which role will be unset
 *
 * @throws InternalErrorException
 * @throws PrivilegeException when the caller is not PERUNADMIN
 * @throws UserNotExistsException when the user does not exist
 * @throws UserNotAdminException
 */
public static void unsetRole(PerunSession sess, User user, PerunBean complementaryObject, Role role) throws InternalErrorException, PrivilegeException, UserNotExistsException, UserNotAdminException {
	Utils.notNull(role, "role");
	((PerunBl) sess.getPerun()).getUsersManagerBl().checkUserExists(sess, user);
	// Only perun admin may revoke roles.
	if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method unsetRole.");
	cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.unsetRole(sess, user, complementaryObject, role);
}

/**
 * Unset role for group and <b>all</b> complementary objects
 *
 * If some complementary object is wrong for the role, throw an exception.
 * For role "perunadmin" ignore complementary objects.
* * @param sess perun session * @param authorizedGroup the group for unsetting role * @param role role of user in a session * @param complementaryObjects objects for which role will be unset * * @throws InternalErrorException * @throws PrivilegeException * @throws GroupNotExistsException * @throws GroupNotAdminException */ public static void unsetRole(PerunSession sess, Group authorizedGroup, Role role, List<PerunBean> complementaryObjects) throws InternalErrorException, PrivilegeException, GroupNotExistsException, GroupNotAdminException { Utils.notNull(role, "role"); ((PerunBl) sess.getPerun()).getGroupsManagerBl().checkGroupExists(sess, authorizedGroup); if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole."); cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.unsetRole(sess, authorizedGroup, role, complementaryObjects); } /** * Unset role for group and <b>one</b> complementary object * * If some complementary object is wrong for the role, throw an exception. * For role "perunadmin" ignore complementary object. 
* * @param sess perun session * @param authorizedGroup the group for unsetting role * @param role role of user in a session * @param complementaryObject object for which role will be unset * * @throws InternalErrorException * @throws PrivilegeException * @throws GroupNotExistsException * @throws GroupNotAdminException */ public static void unsetRole(PerunSession sess, Group authorizedGroup, PerunBean complementaryObject, Role role) throws InternalErrorException, PrivilegeException, GroupNotExistsException, GroupNotAdminException { Utils.notNull(role, "role"); ((PerunBl) sess.getPerun()).getGroupsManagerBl().checkGroupExists(sess, authorizedGroup); if(!isAuthorized(sess, Role.PERUNADMIN)) throw new PrivilegeException("You are not privileged to use this method setRole."); cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.unsetRole(sess, authorizedGroup, complementaryObject, role); } /** * Get all richUser administrators for complementary object and role with specified attributes. * * If <b>onlyDirectAdmins</b> is <b>true</b>, return only direct users of the complementary object for role with specific attributes. * If <b>allUserAttributes</b> is <b>true</b>, do not specify attributes through list and return them all in objects richUser. Ignoring list of specific attributes. 
* * @param sess perun session * @param complementaryObjectId id of object for which we will get richUser administrators * @param complementaryObjectName name of object for which we will get richUser administrators * @param specificAttributes list of specified attributes which are needed in object richUser * @param role expected role to filter managers by * @param onlyDirectAdmins if true, get only direct user administrators (if false, get both direct and indirect) * @param allUserAttributes if true, get all possible user attributes and ignore list of specificAttributes (if false, get only specific attributes) * * @return list of richUser administrators for complementary object and role with specified attributes. * * @throws InternalErrorException * @throws PrivilegeException * @throws GroupNotExistsException * @throws VoNotExistsException * @throws FacilityNotExistsException * @throws RoleNotSupportedException * @throws PerunBeanNotSupportedException * @throws UserNotExistsException */ public static List<RichUser> getRichAdmins(PerunSession sess, int complementaryObjectId, String complementaryObjectName, List<String> specificAttributes, Role role, boolean onlyDirectAdmins, boolean allUserAttributes) throws InternalErrorException, PrivilegeException, GroupNotExistsException, VoNotExistsException, FacilityNotExistsException, RoleNotSupportedException, PerunBeanNotSupportedException, UserNotExistsException { Utils.checkPerunSession(sess); Utils.notNull(role, "role"); Utils.notNull(complementaryObjectName, "complementaryObjectName"); if(!allUserAttributes) Utils.notNull(specificAttributes, "specificAttributes"); List<RichUser> richUsers; //Try to get complementary Object if(complementaryObjectName.equals("Group")) { if(!role.equals(Role.GROUPADMIN)) throw new RoleNotSupportedException("Not supported other role than group manager for object Group."); Group group = ((PerunBl) sess.getPerun()).getGroupsManagerBl().getGroupById(sess, complementaryObjectId); richUsers = 
sess.getPerun().getGroupsManager().getRichAdmins(sess, group, specificAttributes, allUserAttributes, onlyDirectAdmins); } else if (complementaryObjectName.equals("Vo")) { Vo vo = ((PerunBl) sess.getPerun()).getVosManagerBl().getVoById(sess, complementaryObjectId); richUsers = sess.getPerun().getVosManager().getRichAdmins(sess, vo, role, specificAttributes, allUserAttributes, onlyDirectAdmins); } else if (complementaryObjectName.equals("Facility")) { if(!role.equals(Role.FACILITYADMIN)) throw new RoleNotSupportedException("Not supported other role than facility manager for object Facility."); Facility facility = ((PerunBl) sess.getPerun()).getFacilitiesManagerBl().getFacilityById(sess, complementaryObjectId); richUsers = sess.getPerun().getFacilitiesManager().getRichAdmins(sess, facility, specificAttributes, allUserAttributes, onlyDirectAdmins); } else { throw new PerunBeanNotSupportedException("Only Vo, Group and Facility are supported complementary names."); } return richUsers; } /** * Get all authorizedGroups for complementary object and role. 
 *
 * @param sess perun session
 * @param complementaryObjectId id of object for which we will get richUser administrators
 * @param complementaryObjectName name of object for which we will get richUser administrators
 * @param role expected role to filter authorizedGroups by (perunadmin | voadmin | groupadmin | self | facilityadmin | voobserver | topgroupcreator)
 *
 * @return list of authorizedGroups for complementary object and role
 *
 * @throws InternalErrorException
 * @throws UserNotExistsException
 * @throws PrivilegeException
 * @throws GroupNotExistsException
 * @throws VoNotExistsException
 * @throws FacilityNotExistsException
 * @throws RoleNotSupportedException
 * @throws PerunBeanNotSupportedException
 */
public static List<Group> getAdminGroups(PerunSession sess, int complementaryObjectId, String complementaryObjectName, Role role) throws InternalErrorException, UserNotExistsException, PrivilegeException, GroupNotExistsException, VoNotExistsException, FacilityNotExistsException, RoleNotSupportedException, PerunBeanNotSupportedException {
	Utils.checkPerunSession(sess);
	Utils.notNull(role, "role");
	Utils.notNull(complementaryObjectName, "complementaryObjectName");
	List<Group> authorizedGroups;
	// Try to get complementary Object by its bean name and delegate to the matching manager.
	if(complementaryObjectName.equals("Group")) {
		// Groups only support the group-manager role.
		if(!role.equals(Role.GROUPADMIN)) throw new RoleNotSupportedException("Not supported other role than group manager for object Group.");
		Group group = ((PerunBl) sess.getPerun()).getGroupsManagerBl().getGroupById(sess, complementaryObjectId);
		authorizedGroups = sess.getPerun().getGroupsManager().getAdminGroups(sess, group);
	} else if (complementaryObjectName.equals("Vo")) {
		Vo vo = ((PerunBl) sess.getPerun()).getVosManagerBl().getVoById(sess, complementaryObjectId);
		authorizedGroups = sess.getPerun().getVosManager().getAdminGroups(sess, vo, role);
	} else if (complementaryObjectName.equals("Facility")) {
		// Facilities only support the facility-manager role.
		if(!role.equals(Role.FACILITYADMIN)) throw new RoleNotSupportedException("Not supported other role than facility manager for object Facility.");
		Facility facility = ((PerunBl) sess.getPerun()).getFacilitiesManagerBl().getFacilityById(sess, complementaryObjectId);
		authorizedGroups = sess.getPerun().getFacilitiesManager().getAdminGroups(sess, facility);
	} else {
		throw new PerunBeanNotSupportedException("Only Vo, Group and Facility are supported complementary names.");
	}
	return authorizedGroups;
}

/**
 * Returns PerunPrincipal object associated with current session. It contains necessary information,
 * including user identification, authorization and metadata. Each call of this method refresh the
 * session including authorization data.
 *
 * @param sess perun session
 * @return perunPrincipal object
 * @throws InternalErrorException if the PerunSession is not valid.
 * @throws UserNotExistsException if the principal's user cannot be resolved
 */
public static PerunPrincipal getPerunPrincipal(PerunSession sess) throws InternalErrorException, UserNotExistsException {
	return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getPerunPrincipal(sess);
}

/**
 * Returns all complementary objects for defined role.
 *
 * @param sess perun session
 * @param role to get object for
 * @return list of complementary objects
 * @throws InternalErrorException
 */
public static List<PerunBean> getComplementaryObjectsForRole(PerunSession sess, Role role) throws InternalErrorException {
	return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getComplementaryObjectsForRole(sess, role);
}

/**
 * Returns complementary objects for defined role filtered by particular class, e.g. Vo, Group, ...
 *
 * @param sess perun session
 * @param role to get object for
 * @param perunBeanClass particular class ( Vo | Group | ...
) * @return list of complementary objects * @throws InternalErrorException */ public static List<PerunBean> getComplementaryObjectsForRole(PerunSession sess, Role role, Class perunBeanClass) throws InternalErrorException { return cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.getComplementaryObjectsForRole(sess, role, perunBeanClass); } /** * Removes all existing roles for the perunPrincipal and call init again. * * @param sess perun session * @throws InternalErrorException */ public static void refreshAuthz(PerunSession sess) throws InternalErrorException { cz.metacentrum.perun.core.blImpl.AuthzResolverBlImpl.refreshAuthz(sess); } }
package lu.albert.colorutils; import java.awt.Color; import java.util.logging.Logger; /** * Collection of algoritms found on various places on the web. * Credit is given where I could find it. * * @author malbert */ public class ColorMath { // The class logger private static final Logger logger = Logger.getLogger(ColorMath.class.getCanonicalName()); /** * Return the complementary color * * @return */ public static Color getComplement(Color input) { int[] hsl = new int[3]; int[] rgb = new int[3]; rgb2hsl(input.getRed(), input.getGreen(), input.getBlue(), hsl); int new_h = hsl[0] + 180; if (new_h >= 360) { new_h -= 360; } hsl2rgb(new_h, hsl[1], hsl[2], rgb); logger.fine(String.format("Complement r=%d g=%s b=%s", rgb[0], rgb[1], rgb[2])); try { return new Color(rgb[0], rgb[1], rgb[2]); } catch (IllegalArgumentException exc){ System.err.println(exc.getMessage()); return Color.BLACK; } } public static String toHex(Color color){ return String.format("#%02x%02x%02x", color.getRed(), color.getGreen(), color.getBlue()); } /** * Convert RGB to HSL components H ranges from 0-360, S and L ranges from 0 to 100 * @param r Red * @param g Green * @param b Blue * @param hsl This array will be assigned the HSL values (3 ints) */ public static void rgb2hsl(int r, int g, int b, int hsl[]) { float red = (r / 255f); float green = (g / 255f); float blue = (b / 255f); logger.fine(String.format("Converting r=%1.3f, g=%1.3f, b=%1.3f to HSL", red, green, blue)); float min = Math.min(red, Math.min(green, blue)); float max = Math.max(red, Math.max(green, blue)); float delta; delta = max - min; float h = 0; float s; float l; l = (max + min) / 2f; if (delta == 0) { h = 0; s = 0; } else { if (l < 0.5) { s = delta / (max + min); } else { s = delta / (2 - max - min); } float delta_r = (((max - red) / 6f) + (delta / 2f)) / delta; float delta_g = (((max - green) / 6f) + (delta / 2f)) / delta; float delta_b = (((max - blue) / 6f) + (delta / 2f)) / delta; if (red == max) { h = delta_b - delta_g; } else 
if (green == max) { h = (1 / 3f) + delta_r - delta_b; } else if (blue == max) { h = (2 / 3f) + delta_g - delta_r; } if (h < 0) { h += 1; } if (h > 1) { h -= 1; } } hsl[0] = Math.round(360 * h); hsl[1] = Math.round(s * 100); hsl[2] = Math.round(l * 100); logger.fine(String.format("Result of rgb2hsl: h=%d, s=%d, l=%d", hsl[0], hsl[1], hsl[2])); } /** * Convert HSL values to RGB * * @param h * @param s * @param x * @param rgb This array will be assigned the RGB values (3 ints) */ public static void hsl2rgb(int h, int s, int l, int rgb[]) { float hue = h / 360f; float saturation = s / 100f; float lightness = l / 100f; float r; float g; float b; float tmp1; float tmp2; logger.fine(String.format("Converting h=%1.3f, s=%1.3f, l=%1.3f to RGB", hue, saturation, lightness)); if (saturation == 0) { r = lightness * 255f; g = lightness * 255f; b = lightness * 255f; } else { if (lightness < 0.5) { tmp2 = lightness * (1 + saturation); } else { tmp2 = (lightness + saturation) - (saturation * lightness); } tmp1 = 2 * lightness - tmp2; r = 255 * hue2rgb(tmp1, tmp2, hue + (1f / 3f)); g = 255 * hue2rgb(tmp1, tmp2, hue); b = 255 * hue2rgb(tmp1, tmp2, hue - (1f / 3f)); } rgb[0] = Math.round(r); rgb[1] = Math.round(g); rgb[2] = Math.round(b); logger.fine(String.format("Result of hsl2rgb: r=%d, g=%d, b=%d", rgb[0], rgb[1], rgb[2])); } /** * Convert a hue value into the appropriate RGB value * @see <a href="http://www.easyrgb.com/index.php?X=MATH&H=19#text19">EasyRGB</a> * * @param v1 ? * @param v2 ? * @param vH ? 
* @return */ public static float hue2rgb(float v1, float v2, float vH) { if (vH < 0) { vH += 1; } if (vH > 1) { vH -= 1; } if ((6f * vH) < 1) { return (v1 + (v2 - v1) * 6f * vH); } if ((2f * vH) < 1) { return (v2); } if ((3f * vH) < 2) { return (v1 + (v2 - v1) * ((2f / 3f) - vH) * 6); } return v1; } /** * Convenience method to return a color given a HSL array; * @param hsl * @return */ public static Color fromHSL(int[] hsl){ int[] rgb = new int[3]; hsl2rgb(hsl[0], hsl[1], hsl[2], rgb); return new Color(rgb[0], rgb[1], rgb[2]); } /** * Creates an HSL array from a given color * @param color the input color * @param hsl the HSL triple */ public static void getHSL(Color color, int[] hsl) { rgb2hsl(color.getRed(), color.getGreen(), color.getBlue(), hsl); } /** * Returns the relative luminosity (luminance) of the given color. The value ranges from 0-255. * The weights were taken from http://en.wikipedia.org/wiki/Luminance_(relative) * * @param color * @return */ public static float getLuminosity(Color color) { return 0.2126f * color.getRed() + 0.7152f * color.getGreen() + 0.0722f * color.getBlue(); } /** * Disambiguation method for getLuminosity. 
* * @param color * @return * @see #getLuminosity(java.awt.Color) */ public static float getLuminance(Color color) { return getLuminosity(color); } // ###################################################################### // The following code has been nicked from // http://www.f4.fhtw-berlin.de/~barthel/ImageJ/ColorInspector//HTMLHelp/farbraumJava.htm // They will be adapted to be more usable void rgb2ycbcr(int r, int g, int b, int[] ycbcr) { int y = (int) (0.299 * r + 0.587 * g + 0.114 * b); int cb = (int) (-0.16874 * r - 0.33126 * g + 0.50000 * b); int cr = (int) (0.50000 * r - 0.41869 * g - 0.08131 * b); ycbcr[0] = y; ycbcr[1] = cb; ycbcr[2] = cr; } void rgb2yuv(int r, int g, int b, int[] yuv) { int y = (int) (0.299 * r + 0.587 * g + 0.114 * b); int u = (int) ((b - y) * 0.492f); int v = (int) ((r - y) * 0.877f); yuv[0] = y; yuv[1] = u; yuv[2] = v; } void rgb2hsb(int r, int g, int b, int[] hsb) { float[] hsbvals = new float[3]; Color.RGBtoHSB(r, g, b, hsbvals); } void rgb2hmmd(int r, int g, int b, int[] hmmd) { float max = (int) Math.max(Math.max(r, g), Math.max(g, b)); float min = (int) Math.min(Math.min(r, g), Math.min(g, b)); float diff = (max - min); float sum = (float) ((max + min) / 2.); float hue = 0; if (diff == 0) { hue = 0; } else if (r == max && (g - b) > 0) { hue = 60 * (g - b) / (max - min); } else if (r == max && (g - b) <= 0) { hue = 60 * (g - b) / (max - min) + 360; } else if (g == max) { hue = (float) (60 * (2. + (b - r) / (max - min))); } else if (b == max) { hue = (float) (60 * (4. + (r - g) / (max - min))); } hmmd[0] = (int) (hue); hmmd[1] = (int) (max); hmmd[2] = (int) (min); hmmd[3] = (int) (diff); } private void rgb2hsv(int r, int g, int b, int hsv[]) { int min; //Min. value of RGB int max; //Max. 
value of RGB int delMax; //Delta RGB value if (r > g) { min = g; max = r; } else { min = r; max = g; } if (b > max) { max = b; } if (b < min) { min = b; } delMax = max - min; float H = 0, S; float V = max; if (delMax == 0) { H = 0; S = 0; } else { S = delMax / 255f; if (r == max) { H = ((g - b) / (float) delMax) * 60; } else if (g == max) { H = (2 + (b - r) / (float) delMax) * 60; } else if (b == max) { H = (4 + (r - g) / (float) delMax) * 60; } } hsv[0] = (int) (H); hsv[1] = (int) (S * 100); hsv[2] = (int) (V * 100); } public void rgb2xyY(int R, int G, int B, int[] xyy) { //http://www.brucelindbloom.com float rf, gf, bf; float r, g, b, X, Y, Z; // RGB to XYZ r = R / 255.f; //R 0..1 g = G / 255.f; //G 0..1 b = B / 255.f; //B 0..1 if (r <= 0.04045) { r = r / 12; } else { r = (float) Math.pow((r + 0.055) / 1.055, 2.4); } if (g <= 0.04045) { g = g / 12; } else { g = (float) Math.pow((g + 0.055) / 1.055, 2.4); } if (b <= 0.04045) { b = b / 12; } else { b = (float) Math.pow((b + 0.055) / 1.055, 2.4); } X = 0.436052025f * r + 0.385081593f * g + 0.143087414f * b; Y = 0.222491598f * r + 0.71688606f * g + 0.060621486f * b; Z = 0.013929122f * r + 0.097097002f * g + 0.71418547f * b; float x; float y; float sum = X + Y + Z; if (sum != 0) { x = X / sum; y = Y / sum; } else { float Xr = 0.964221f; // reference white float Yr = 1.0f; float Zr = 0.825211f; x = Xr / (Xr + Yr + Zr); y = Yr / (Xr + Yr + Zr); } xyy[0] = (int) (255 * x + .5); xyy[1] = (int) (255 * y + .5); xyy[2] = (int) (255 * Y + .5); } public void rgb2xyz(int R, int G, int B, int[] xyz) { float rf, gf, bf; float r, g, b, X, Y, Z; r = R / 255.f; //R 0..1 g = G / 255.f; //G 0..1 b = B / 255.f; //B 0..1 if (r <= 0.04045) { r = r / 12; } else { r = (float) Math.pow((r + 0.055) / 1.055, 2.4); } if (g <= 0.04045) { g = g / 12; } else { g = (float) Math.pow((g + 0.055) / 1.055, 2.4); } if (b <= 0.04045) { b = b / 12; } else { b = (float) Math.pow((b + 0.055) / 1.055, 2.4); } X = 0.436052025f * r + 0.385081593f * g + 
0.143087414f * b; Y = 0.222491598f * r + 0.71688606f * g + 0.060621486f * b; Z = 0.013929122f * r + 0.097097002f * g + 0.71418547f * b; xyz[1] = (int) (255 * Y + .5); xyz[0] = (int) (255 * X + .5); xyz[2] = (int) (255 * Z + .5); } public void rgb2lab(int R, int G, int B, int[] lab) { //http://www.brucelindbloom.com float r, g, b, X, Y, Z, fx, fy, fz, xr, yr, zr; float Ls, as, bs; float eps = 216.f / 24389.f; float k = 24389.f / 27.f; float Xr = 0.964221f; // reference white D50 float Yr = 1.0f; float Zr = 0.825211f; // RGB to XYZ r = R / 255.f; //R 0..1 g = G / 255.f; //G 0..1 b = B / 255.f; //B 0..1 // assuming sRGB (D65) if (r <= 0.04045) { r = r / 12; } else { r = (float) Math.pow((r + 0.055) / 1.055, 2.4); } if (g <= 0.04045) { g = g / 12; } else { g = (float) Math.pow((g + 0.055) / 1.055, 2.4); } if (b <= 0.04045) { b = b / 12; } else { b = (float) Math.pow((b + 0.055) / 1.055, 2.4); } X = 0.436052025f * r + 0.385081593f * g + 0.143087414f * b; Y = 0.222491598f * r + 0.71688606f * g + 0.060621486f * b; Z = 0.013929122f * r + 0.097097002f * g + 0.71418547f * b; // XYZ to Lab xr = X / Xr; yr = Y / Yr; zr = Z / Zr; if (xr > eps) { fx = (float) Math.pow(xr, 1 / 3.); } else { fx = (float) ((k * xr + 16.) / 116.); } if (yr > eps) { fy = (float) Math.pow(yr, 1 / 3.); } else { fy = (float) ((k * yr + 16.) / 116.); } if (zr > eps) { fz = (float) Math.pow(zr, 1 / 3.); } else { fz = (float) ((k * zr + 16.) 
/ 116); } Ls = (116 * fy) - 16; as = 500 * (fx - fy); bs = 200 * (fy - fz); lab[0] = (int) (2.55 * Ls + .5); lab[1] = (int) (as + .5); lab[2] = (int) (bs + .5); } public void rgb2luv(int R, int G, int B, int[] luv) { //http://www.brucelindbloom.com float rf, gf, bf; float r, g, b, X_, Y_, Z_, X, Y, Z, fx, fy, fz, xr, yr, zr; float L; float eps = 216.f / 24389.f; float k = 24389.f / 27.f; float Xr = 0.964221f; // reference white D50 float Yr = 1.0f; float Zr = 0.825211f; // RGB to XYZ r = R / 255.f; //R 0..1 g = G / 255.f; //G 0..1 b = B / 255.f; //B 0..1 // assuming sRGB (D65) if (r <= 0.04045) { r = r / 12; } else { r = (float) Math.pow((r + 0.055) / 1.055, 2.4); } if (g <= 0.04045) { g = g / 12; } else { g = (float) Math.pow((g + 0.055) / 1.055, 2.4); } if (b <= 0.04045) { b = b / 12; } else { b = (float) Math.pow((b + 0.055) / 1.055, 2.4); } X = 0.436052025f * r + 0.385081593f * g + 0.143087414f * b; Y = 0.222491598f * r + 0.71688606f * g + 0.060621486f * b; Z = 0.013929122f * r + 0.097097002f * g + 0.71418547f * b; // XYZ to Luv float u, v, u_, v_, ur_, vr_; u_ = 4 * X / (X + 15 * Y + 3 * Z); v_ = 9 * Y / (X + 15 * Y + 3 * Z); ur_ = 4 * Xr / (Xr + 15 * Yr + 3 * Zr); vr_ = 9 * Yr / (Xr + 15 * Yr + 3 * Zr); yr = Y / Yr; if (yr > eps) { L = (float) (116 * Math.pow(yr, 1 / 3.) - 16); } else { L = k * yr; } u = 13 * L * (u_ - ur_); v = 13 * L * (v_ - vr_); luv[0] = (int) (2.55 * L + .5); luv[1] = (int) (u + .5); luv[2] = (int) (v + .5); } }
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.model.volatility.surface; import static org.testng.AssertJUnit.assertEquals; import org.testng.annotations.Test; import com.opengamma.analytics.financial.model.option.definition.SmileDeltaParameters; import com.opengamma.analytics.financial.model.volatility.VolatilityAndBucketedSensitivities; import com.opengamma.analytics.math.interpolation.Interpolator1D; import com.opengamma.analytics.math.interpolation.LinearInterpolator1D; import com.opengamma.analytics.math.interpolation.data.ArrayInterpolator1DDataBundle; import com.opengamma.analytics.math.interpolation.factory.FlatExtrapolator1dAdapter; import com.opengamma.analytics.math.interpolation.factory.LinearInterpolator1dAdapter; import com.opengamma.analytics.math.interpolation.factory.NamedInterpolator1dFactory; import com.opengamma.analytics.math.interpolation.factory.TimeSquareInterpolator1dAdapter; import com.opengamma.util.test.TestGroup; import com.opengamma.util.tuple.Triple; /** * Tests related to the construction of term structure of smile data from delta. * Tests related to the interpolation of volatility. 
 */
@Test(groups = TestGroup.UNIT)
public class SmileDeltaTermStructureParametersStrikeInterpolationTest {

	// Expiry pillars (in years) and the corresponding ATM volatilities.
	private static final double[] TIME_TO_EXPIRY = {0.10, 0.25, 0.50, 1.00, 2.00, 3.00};
	private static final double[] ATM = {0.175, 0.185, 0.18, 0.17, 0.16, 0.17};
	// Deltas at which the risk-reversal and strangle quotes below are given.
	private static final double[] DELTA = new double[] {0.10, 0.25};
	private static final double[][] RISK_REVERSAL = new double[][] { {-0.010, -0.0050}, {-0.011, -0.0060}, {-0.012, -0.0070}, {-0.013, -0.0080}, {-0.014, -0.0090}, {-0.014, -0.0090}};
	private static final double[][] STRANGLE = new double[][] { {0.0300, 0.0100}, {0.0310, 0.0110}, {0.0320, 0.0120}, {0.0330, 0.0130}, {0.0340, 0.0140}, {0.0340, 0.0140}};
	private static final int NB_EXP = TIME_TO_EXPIRY.length;
	private static final SmileDeltaParameters[] VOLATILITY_TERM = new SmileDeltaParameters[NB_EXP];
	static {
		// Build one smile (ATM + risk reversal + strangle quotes) per expiry pillar.
		for (int loopexp = 0; loopexp < NB_EXP; loopexp++) {
			VOLATILITY_TERM[loopexp] = new SmileDeltaParameters(TIME_TO_EXPIRY[loopexp], ATM[loopexp], DELTA, RISK_REVERSAL[loopexp], STRANGLE[loopexp]);
		}
	}
	// Linear-in-strike and time-square interpolators, both flat-extrapolated on each side.
	private static final Interpolator1D INTERPOLATOR_STRIKE = NamedInterpolator1dFactory.of(LinearInterpolator1dAdapter.NAME, FlatExtrapolator1dAdapter.NAME, FlatExtrapolator1dAdapter.NAME);
	private static final Interpolator1D INTERPOLATOR_TIME = NamedInterpolator1dFactory.of(TimeSquareInterpolator1dAdapter.NAME, FlatExtrapolator1dAdapter.NAME, FlatExtrapolator1dAdapter.NAME);
	// Object under test, built from the per-expiry smiles above.
	private static final SmileDeltaTermStructureParametersStrikeInterpolation SMILE_TERM = new SmileDeltaTermStructureParametersStrikeInterpolation(VOLATILITY_TERM, INTERPOLATOR_STRIKE);
	private static final double TOLERANCE_VOL = 1.0E-10;

	@Test(expectedExceptions = IllegalArgumentException.class)
	public void testNullVolatility() {
		// A null volatility term structure must be rejected by the constructor.
		new SmileDeltaTermStructureParametersStrikeInterpolation(null, INTERPOLATOR_STRIKE);
	}

	@Test
	public void getter() {
		assertEquals("Smile by delta term structure: volatility", VOLATILITY_TERM, SMILE_TERM.getVolatilityTerm());
	}

	@Test
	public void constructor() {
		// Building from the raw market quotes must be equivalent to building from the smiles.
		final SmileDeltaTermStructureParametersStrikeInterpolation smileTerm2 = new SmileDeltaTermStructureParametersStrikeInterpolation(TIME_TO_EXPIRY, DELTA, ATM, RISK_REVERSAL, STRANGLE);
		assertEquals("Smile by delta term structure: constructor", SMILE_TERM, smileTerm2);
	}

	@Test
	public void constructor2() {
		// Building from the explicit volatility grid must also be equivalent.
		final double[][] vol = new double[NB_EXP][];
		for (int loopexp = 0; loopexp < NB_EXP; loopexp++) {
			vol[loopexp] = VOLATILITY_TERM[loopexp].getVolatility();
		}
		final SmileDeltaTermStructureParametersStrikeInterpolation smileTermVol = new SmileDeltaTermStructureParametersStrikeInterpolation(TIME_TO_EXPIRY, DELTA, vol);
		assertEquals("Smile by delta term structure: constructor", SMILE_TERM, smileTermVol);
	}

	@Test
	/**
	 * Tests the volatility at a point of the grid.
	 */
	public void volatilityAtPoint() {
		final double forward = 1.40;
		final double timeToExpiration = 0.50;
		// On a grid node the interpolated value must equal the stored value exactly.
		final double[] strikes = SMILE_TERM.getVolatilityTerm()[2].getStrike(forward);
		final double volComputed = SMILE_TERM.getVolatility(timeToExpiration, strikes[1], forward);
		final double volExpected = SMILE_TERM.getVolatilityTerm()[2].getVolatility()[1];
		assertEquals("Smile by delta term structure: volatility at a point", volExpected, volComputed, TOLERANCE_VOL);
	}

	@Test
	/**
	 * Tests the interpolation in the strike dimension at a time of the grid.
	 */
	public void volatilityStrikeInterpolation() {
		final double forward = 1.40;
		final double timeToExpiration = 0.50;
		final double strike = 1.50;
		// Reference: interpolate the pillar smile directly with a plain linear interpolator.
		final double[] strikes = SMILE_TERM.getVolatilityTerm()[2].getStrike(forward);
		final double[] vol = SMILE_TERM.getVolatilityTerm()[2].getVolatility();
		final ArrayInterpolator1DDataBundle volatilityInterpolation = new ArrayInterpolator1DDataBundle(strikes, vol);
		final LinearInterpolator1D interpolator = new LinearInterpolator1D();
		final double volExpected = interpolator.interpolate(volatilityInterpolation, strike);
		final double volComputed = SMILE_TERM.getVolatility(timeToExpiration, strike, forward);
		assertEquals("Smile by delta term structure: volatility interpolation on strike", volExpected, volComputed, TOLERANCE_VOL);
	}

	@Test
	/**
	 * Tests the extrapolation below the first expiration.
	 */
	public void volatilityBelowFirstExpiry() {
		final double forward = 1.40;
		final double timeToExpiration = 0.05;
		final double strike = 1.45;
		// Reference smile rebuilt at the short expiry from the first pillar's quotes
		// (time extrapolation is flat, so the first pillar's quotes apply).
		final SmileDeltaParameters smile = new SmileDeltaParameters(timeToExpiration, ATM[0], DELTA, RISK_REVERSAL[0], STRANGLE[0]);
		final double[] strikes = smile.getStrike(forward);
		final double[] vol = smile.getVolatility();
		final ArrayInterpolator1DDataBundle volatilityInterpolation = new ArrayInterpolator1DDataBundle(strikes, vol);
		final double volExpected = INTERPOLATOR_STRIKE.interpolate(volatilityInterpolation, strike);
		final double volComputed = SMILE_TERM.getVolatility(timeToExpiration, strike, forward);
		assertEquals("Smile by delta term structure: volatility interpolation on strike", volExpected, volComputed, TOLERANCE_VOL);
	}

	@Test
	/**
	 * Tests the extrapolation above the last expiration.
*/
public void volatilityAboveLastExpiry() {
  // Beyond the last pillar the smile is flat-extrapolated: the last pillar's
  // market quotes are reused at the requested (longer) expiry.
  final double forward = 1.40;
  final double timeToExpiration = 5.00;
  final double strike = 1.45;
  final SmileDeltaParameters smile = new SmileDeltaParameters(timeToExpiration, ATM[NB_EXP - 1], DELTA, RISK_REVERSAL[NB_EXP - 1], STRANGLE[NB_EXP - 1]);
  final double[] strikes = smile.getStrike(forward);
  final double[] vol = smile.getVolatility();
  final ArrayInterpolator1DDataBundle volatilityInterpolation = new ArrayInterpolator1DDataBundle(strikes, vol);
  final double volExpected = INTERPOLATOR_STRIKE.interpolate(volatilityInterpolation, strike);
  final double volComputed = SMILE_TERM.getVolatility(timeToExpiration, strike, forward);
  assertEquals("Smile by delta term structure: volatility interpolation on strike", volExpected, volComputed, TOLERANCE_VOL);
}

@Test
/**
 * Tests the interpolation in the time and strike dimensions.
 */
public void volatilityTimeInterpolation() {
  final double forward = 1.40;
  final double timeToExpiration = 0.75; // Between pillars [2] and [3] of TIME_TO_EXPIRY.
  final double strike = 1.50;
  final double[] vol050 = SMILE_TERM.getVolatilityTerm()[2].getVolatility();
  final double[] vol100 = SMILE_TERM.getVolatilityTerm()[3].getVolatility();
  final double[] vol = new double[vol050.length];
  // Expected smile: linear interpolation in total variance (sigma^2 * t) between the
  // two bracketing pillars, converted back to a volatility at the target expiry.
  for (int loopvol = 0; loopvol < vol050.length; loopvol++) {
    vol[loopvol] = Math.sqrt((vol050[loopvol] * vol050[loopvol] * TIME_TO_EXPIRY[2] + vol100[loopvol] * vol100[loopvol] * TIME_TO_EXPIRY[3]) / 2.0 / timeToExpiration);
  }
  final SmileDeltaParameters smile = new SmileDeltaParameters(timeToExpiration, DELTA, vol);
  final double[] strikes = smile.getStrike(forward);
  final ArrayInterpolator1DDataBundle volatilityInterpolation = new ArrayInterpolator1DDataBundle(strikes, vol);
  final LinearInterpolator1D interpolator = new LinearInterpolator1D();
  final double volExpected = interpolator.interpolate(volatilityInterpolation, strike);
  final double volComputed = SMILE_TERM.getVolatility(timeToExpiration, strike, forward);
  assertEquals("Smile by delta term structure: volatility interpolation on strike", volExpected, volComputed, TOLERANCE_VOL);
  // The Triple-based overload must agree with the three-argument call.
  final double volTriple = SMILE_TERM.getVolatility(Triple.of(timeToExpiration, strike, forward));
  assertEquals("Smile by delta term structure: volatility interpolation on strike", volComputed, volTriple, TOLERANCE_VOL);
  // A term structure rebuilt from the raw market quotes must give the same volatility.
  final SmileDeltaTermStructureParametersStrikeInterpolation smileTerm2 = new SmileDeltaTermStructureParametersStrikeInterpolation(TIME_TO_EXPIRY, DELTA, ATM, RISK_REVERSAL, STRANGLE, INTERPOLATOR_STRIKE, INTERPOLATOR_TIME);
  final double volComputed2 = smileTerm2.getVolatility(timeToExpiration, strike, forward);
  assertEquals("Smile by delta term structure: volatility interpolation on strike", volComputed, volComputed2, TOLERANCE_VOL);
}

@Test
/**
 * Tests the interpolation and its derivative with respect to the data by comparison to finite difference.
 */
public void volatilityAjoint() {
  final double forward = 1.40;
  final double[] timeToExpiration = new double[] {0.75, 1.00, 2.50};
  final double[] strike = new double[] {1.50, 1.70, 2.20};
  final double[] tolerance = new double[] {3.0E-2, 1.0E-1, 1.0E-5};
  final int nbTest = strike.length;
  final double shift = 0.00001; // Bump size for the one-sided finite difference.
  for (int looptest = 0; looptest < nbTest; looptest++) {
    final double vol = SMILE_TERM.getVolatility(timeToExpiration[looptest], strike[looptest], forward);
    final double[][] bucketTest = new double[TIME_TO_EXPIRY.length][2 * DELTA.length + 1];
    final VolatilityAndBucketedSensitivities volComputed = SMILE_TERM.getVolatilityAndSensitivities(timeToExpiration[looptest], strike[looptest], forward);
    final double[][] bucketSensi = volComputed.getBucketedSensitivities();
    assertEquals("Smile by delta term structure: volatility adjoint", vol, volComputed.getVolatility(), 1.0E-10);
    final SmileDeltaParameters[] volData = new SmileDeltaParameters[TIME_TO_EXPIRY.length];
    final double[] volBumped = new double[2 * DELTA.length + 1];
    // Bump each (expiry, smile-point) market quote in turn, rebuild the term structure,
    // and compare the reported bucketed sensitivity to the finite-difference estimate.
    for (int loopexp = 0; loopexp < TIME_TO_EXPIRY.length; loopexp++) {
      for (int loopsmile = 0; loopsmile < 2 * DELTA.length + 1; loopsmile++) {
        System.arraycopy(SMILE_TERM.getVolatilityTerm(), 0, volData, 0, TIME_TO_EXPIRY.length);
        System.arraycopy(SMILE_TERM.getVolatilityTerm()[loopexp].getVolatility(), 0, volBumped, 0, 2 * DELTA.length + 1);
        volBumped[loopsmile] += shift;
        volData[loopexp] = new SmileDeltaParameters(TIME_TO_EXPIRY[loopexp], DELTA, volBumped);
        final SmileDeltaTermStructureParametersStrikeInterpolation smileTermBumped = new SmileDeltaTermStructureParametersStrikeInterpolation(volData, INTERPOLATOR_STRIKE);
        bucketTest[loopexp][loopsmile] = (smileTermBumped.getVolatility(timeToExpiration[looptest], strike[looptest], forward) - volComputed.getVolatility()) / shift;
        // FIXME: the strike sensitivity to volatility is missing. To be corrected when [PLAT-1396] is fixed.
        assertEquals("Smile by delta term structure: (test: " + looptest + ") volatility bucket sensitivity " + loopexp + " - " + loopsmile, bucketTest[loopexp][loopsmile], bucketSensi[loopexp][loopsmile], tolerance[looptest]);
      }
    }
  }
}

@Test(enabled = false)
/**
 * Code to graph the strikes for the given deltas at different expirations. In normal tests, should be (enabled=false).
*/
public void deltaSmile() {
  final double forward = 1.40;
  final double expiryMax = 2.0;
  final int nbExp = 50; // Number of expiries sampled on [0, expiryMax).
  final int nbVol = 2 * DELTA.length + 1; // Smile points: put deltas, ATM, call deltas.
  final double[][] strikes = new double[nbExp][nbVol];
  final double[] expiries = new double[nbExp];
  final double[] variancePeriodT = new double[nbVol];
  final double[] volatilityT = new double[nbVol];
  final double[] variancePeriod0 = new double[nbVol];
  final double[] variancePeriod1 = new double[nbVol];
  for (int loopexp = 0; loopexp < nbExp; loopexp++) {
    expiries[loopexp] = loopexp * expiryMax / nbExp;
    // Bundle used only to locate the bracketing pillar index; its y-values are irrelevant.
    final ArrayInterpolator1DDataBundle interpData = new ArrayInterpolator1DDataBundle(TIME_TO_EXPIRY, new double[NB_EXP]);
    final int indexLower = interpData.getLowerBoundIndex(expiries[loopexp]);
    if (expiries[loopexp] < 1.0E-10) {
      // At (numerically) zero expiry, use the lower pillar's volatilities directly.
      for (int loopvol = 0; loopvol < nbVol; loopvol++) {
        volatilityT[loopvol] = SMILE_TERM.getVolatilityTerm()[indexLower].getVolatility()[loopvol];
      }
    } else {
      final double weight0 = (TIME_TO_EXPIRY[indexLower + 1] - expiries[loopexp]) / (TIME_TO_EXPIRY[indexLower + 1] - TIME_TO_EXPIRY[indexLower]);
      // Implementation note: Linear interpolation on variance over the period (s^2*t).
      for (int loopvol = 0; loopvol < nbVol; loopvol++) {
        variancePeriod0[loopvol] = SMILE_TERM.getVolatilityTerm()[indexLower].getVolatility()[loopvol] * SMILE_TERM.getVolatilityTerm()[indexLower].getVolatility()[loopvol] * TIME_TO_EXPIRY[indexLower];
        variancePeriod1[loopvol] = SMILE_TERM.getVolatilityTerm()[indexLower + 1].getVolatility()[loopvol] * SMILE_TERM.getVolatilityTerm()[indexLower + 1].getVolatility()[loopvol] * TIME_TO_EXPIRY[indexLower + 1];
        variancePeriodT[loopvol] = weight0 * variancePeriod0[loopvol] + (1 - weight0) * variancePeriod1[loopvol];
        volatilityT[loopvol] = Math.sqrt(variancePeriodT[loopvol] / expiries[loopexp]);
      }
    }
    final SmileDeltaParameters smile = new SmileDeltaParameters(expiries[loopexp], DELTA, volatilityT);
    strikes[loopexp] = smile.getStrike(forward);
  }
}

@Test(enabled = false)
/**
 * Analyses the code performance. In normal tests, should be (enabled=false).
 */
public void performance() {
  long startTime, endTime;
  final int nbTest = 100000;
  final double forward = 1.40;
  final double timeToExpiration = 0.50;
  final double strike = 1.50;
  @SuppressWarnings("unused")
  double volComputed;
  startTime = System.currentTimeMillis();
  for (int looptest = 0; looptest < nbTest; looptest++) {
    volComputed = SMILE_TERM.getVolatility(timeToExpiration, strike, forward);
  }
  endTime = System.currentTimeMillis();
  System.out.println(nbTest + " Smile Delta volatility: " + (endTime - startTime) + " ms");
  // Performance note: price: 18-Jun-12: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 225 ms for 100000 volatilities.
}
}
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.postgresql.model; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.Log; import org.jkiss.dbeaver.ext.postgresql.PostgreConstants; import org.jkiss.dbeaver.ext.postgresql.PostgreUtils; import org.jkiss.dbeaver.model.*; import org.jkiss.dbeaver.model.exec.DBCException; import org.jkiss.dbeaver.model.exec.DBCExecutionPurpose; import org.jkiss.dbeaver.model.exec.DBCSession; import org.jkiss.dbeaver.model.exec.jdbc.JDBCPreparedStatement; import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet; import org.jkiss.dbeaver.model.exec.jdbc.JDBCSession; import org.jkiss.dbeaver.model.exec.jdbc.JDBCStatement; import org.jkiss.dbeaver.model.impl.jdbc.JDBCExecutionContext; import org.jkiss.dbeaver.model.impl.jdbc.JDBCRemoteInstance; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectCache; import org.jkiss.dbeaver.model.impl.jdbc.cache.JDBCObjectLookupCache; import org.jkiss.dbeaver.model.meta.Association; import org.jkiss.dbeaver.model.meta.IPropertyValueListProvider; import org.jkiss.dbeaver.model.meta.Property; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.runtime.VoidProgressMonitor; import org.jkiss.dbeaver.model.struct.*; import 
org.jkiss.dbeaver.model.struct.rdb.DBSCatalog;
import org.jkiss.utils.CommonUtils;
import org.jkiss.utils.LongKeyMap;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * PostgreDatabase.
 * Represents a single PostgreSQL database (a row of pg_database) and acts as the
 * catalog/object container, default-schema selector and data-type provider for it.
 */
public class PostgreDatabase extends JDBCRemoteInstance<PostgreDataSource>
    implements DBSCatalog, DBPRefreshableObject, DBPStatefulObject, DBPNamedObject2, PostgreObject, DBSObjectSelector, DBPDataTypeProvider, DBSInstanceLazy {

    private static final Log log = Log.getLog(PostgreDatabase.class);

    // Objects supplied at creation time (before the database exists on the server);
    // transient: only meaningful for a not-yet-persisted database.
    private transient PostgreRole initialOwner;
    private transient PostgreTablespace initialTablespace;
    private transient PostgreCharset initialEncoding;

    // Column values of this database's pg_database row (see loadInfo).
    private long oid;
    private String name;
    private long ownerId;
    private String templateName;
    private long encodingId;
    private String collate;
    private String ctype;
    private boolean isTemplate;
    private boolean allowConnect;
    private int connectionLimit;
    private long tablespaceId;

    // Database-scoped metadata caches.
    public final RoleCache roleCache = new RoleCache();
    public final AccessMethodCache accessMethodCache = new AccessMethodCache();
    public final ForeignDataWrapperCache foreignDataWrapperCache = new ForeignDataWrapperCache();
    public final ForeignServerCache foreignServerCache = new ForeignServerCache();
    public final LanguageCache languageCache = new LanguageCache();
    public final EncodingCache encodingCache = new EncodingCache();
    public final TablespaceCache tablespaceCache = new TablespaceCache();
    // Fast OID -> data type map, filled lazily by getDataType(monitor, typeId).
    public final LongKeyMap<PostgreDataType> dataTypeCache = new LongKeyMap<>();

    // Created in initCaches(): the implementation depends on the server type.
    public JDBCObjectLookupCache<PostgreDatabase, PostgreSchema> schemaCache;

    private String activeSchemaName;
    private final List<String> searchPath = new ArrayList<>();
    private List<String> defaultSearchPath = new ArrayList<>();
    private String activeUser;

    // Constructor used when reading the database list (row of pg_database already fetched).
    public PostgreDatabase(DBRProgressMonitor monitor, PostgreDataSource dataSource, ResultSet dbResult) throws DBException {
        super(monitor, dataSource, false);
        this.initCaches();
        this.loadInfo(dbResult);
    }

    private void initCaches() {
        // Schema cache implementation is server-type specific (PostgreSQL, Greenplum, ...).
        schemaCache = dataSource.getServerType().createSchemaCache(this);
/*
        if (!getDataSource().isServerVersionAtLeast(8, 1)) {
            // Roles not supported
            roleCache.setCache(Collections.emptyList());
        }
*/
    }

    // Constructor used when only the database name is known: connects and reads pg_database.
    public PostgreDatabase(DBRProgressMonitor monitor, PostgreDataSource dataSource, String databaseName) throws DBException {
        super(monitor, dataSource, false);
        // We need to set name first
        this.name = databaseName;
        this.initCaches();
        checkInstanceConnection(monitor);
        readDatabaseInfo(monitor);
    }

    // Constructor for a not-yet-persisted database (e.g. CREATE DATABASE dialog).
    public PostgreDatabase(DBRProgressMonitor monitor, PostgreDataSource dataSource, String name, PostgreRole owner, String templateName, PostgreTablespace tablespace, PostgreCharset encoding) throws DBException {
        super(monitor, dataSource, false);
        this.name = name;
        this.initialOwner = owner;
        this.initialTablespace = tablespace;
        this.initialEncoding = encoding;
        this.ownerId = owner == null ? 0 : owner.getObjectId();
        this.templateName = templateName;
        this.tablespaceId = tablespace == null ? 0 : tablespace.getObjectId();
        this.encodingId = encoding == null ?
0 : encoding.getObjectId();
        this.initCaches();
    }

    // (Re)reads this database's pg_database row by name and refreshes the local fields.
    private void readDatabaseInfo(DBRProgressMonitor monitor) throws DBCException {
        try (JDBCSession session = getDefaultContext(true).openSession(monitor, DBCExecutionPurpose.META, "Load database info")) {
            try (JDBCPreparedStatement dbStat = session.prepareStatement("SELECT db.oid,db.*" +
                "\nFROM pg_catalog.pg_database db WHERE datname=?")) {
                dbStat.setString(1, name);
                try (JDBCResultSet dbResult = dbStat.executeQuery()) {
                    if (dbResult.nextRow()) {
                        loadInfo(dbResult);
                    }
                }
            }
        } catch (SQLException e) {
            throw new DBCException(e, getDataSource());
        }
    }

    public PostgreRole getInitialOwner() {
        return initialOwner;
    }

    public PostgreTablespace getInitialTablespace() {
        return initialTablespace;
    }

    public PostgreCharset getInitialEncoding() {
        return initialEncoding;
    }

    // Lazily opens the JDBC contexts on first use and detects the default schema/user.
    @Override
    public void checkInstanceConnection(DBRProgressMonitor monitor) throws DBException {
        if (executionContext == null) {
            initializeMainContext(monitor);
            initializeMetaContext(monitor);
            try (JDBCSession session = getDefaultContext(true).openSession(monitor, DBCExecutionPurpose.UTIL, "Detect default schema/user")) {
                determineDefaultObjects(session);
            } catch (SQLException e) {
                throw new DBException(e, getDataSource());
            }
        }
    }

    // Populates this object's fields from a pg_database result-set row.
    private void loadInfo(ResultSet dbResult) {
        this.oid = JDBCUtils.safeGetLong(dbResult, "oid");
        this.name = JDBCUtils.safeGetString(dbResult, "datname");
        this.ownerId = JDBCUtils.safeGetLong(dbResult, "datdba");
        this.encodingId = JDBCUtils.safeGetLong(dbResult, "encoding");
        if (dataSource.isServerVersionAtLeast(8, 4)) {
            // datcollate/datctype appeared in PostgreSQL 8.4.
            this.collate = JDBCUtils.safeGetString(dbResult, "datcollate");
            this.ctype = JDBCUtils.safeGetString(dbResult, "datctype");
        }
        this.isTemplate = JDBCUtils.safeGetBoolean(dbResult, "datistemplate");
        this.allowConnect = JDBCUtils.safeGetBoolean(dbResult, "datallowconn");
        this.connectionLimit = JDBCUtils.safeGetInt(dbResult, "datconnlimit");
        this.tablespaceId = JDBCUtils.safeGetLong(dbResult, "dattablespace");
    }

    @NotNull
    @Override
    public PostgreDatabase getDatabase() {
        return this;
    }

    @Override
    public long getObjectId() {
        return this.oid;
    }

    @NotNull
    @Override
    @Property(viewable = true, order = 2)
    public String getName() {
        return name;
    }

    @Override
    public void setName(String newName) {
        this.name = newName;
    }

    @Nullable
    @Override
    public String getDescription() {
        return null;
    }

    @Override
    public DBSObject getParentObject() {
        return dataSource.getContainer();
    }

    @NotNull
    @Override
    public PostgreDataSource getDataSource() {
        return dataSource;
    }

    @Override
    public boolean isPersisted() {
        return true;
    }

    // True when this database is the data source's current default instance.
    public boolean isActiveDatabase() {
        return dataSource.getDefaultInstance() == this;
    }

    ///////////////////////////////////////////////////
    // Properties

    public String getTemplateName() {
        return templateName;
    }

    @Nullable
    @Property(editable = true, updatable = true, order = 3, listProvider = RoleListProvider.class)
    public PostgreRole getDBA(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return getRoleById(monitor, ownerId);
    }

    public void setDBA(PostgreRole owner) {
        this.ownerId = owner.getObjectId();
    }

    @Nullable
    public PostgreRole getRoleById(DBRProgressMonitor monitor, long roleId) throws DBException {
        if (!getDataSource().getServerType().supportsRoles()) {
            return null;
        }
        return PostgreUtils.getObjectById(monitor, roleCache, this, roleId);
    }

    @Nullable
    public PostgreRole getRoleByName(DBRProgressMonitor monitor, PostgreDatabase owner, String roleName) throws DBException {
        if (!getDataSource().getServerType().supportsRoles()) {
            return null;
        }
        return roleCache.getObject(monitor, owner, roleName);
    }

    @Property(editable = false, updatable = false, order = 5/*, listProvider = CharsetListProvider.class*/)
    public PostgreCharset getDefaultEncoding(DBRProgressMonitor monitor) throws DBException {
        if (!getDataSource().getServerType().supportsEncodings()) {
            return null;
        }
        checkInstanceConnection(monitor);
        return PostgreUtils.getObjectById(monitor, encodingCache, this, encodingId);
    }
public void setDefaultEncoding(PostgreCharset charset) throws DBException {
        this.encodingId = charset.getObjectId();
    }

    @Property(order = 10)
    public String getCollate() {
        return collate;
    }

    @Property(order = 11)
    public String getCtype() {
        return ctype;
    }

    @Property(order = 12)
    public boolean isTemplate() {
        return isTemplate;
    }

    @Property(order = 13)
    public boolean isAllowConnect() {
        return allowConnect;
    }

    @Property(order = 14)
    public int getConnectionLimit() {
        return connectionLimit;
    }

    ///////////////////////////////////////////////
    // Infos

    @Association
    public Collection<PostgreRole> getAuthIds(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return roleCache.getAllObjects(monitor, this);
    }

    @Association
    public Collection<PostgreAccessMethod> getAccessMethods(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return accessMethodCache.getAllObjects(monitor, this);
    }

    @Association
    public Collection<PostgreForeignDataWrapper> getForeignDataWrappers(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return foreignDataWrapperCache.getAllObjects(monitor, this);
    }

    @Association
    public Collection<PostgreForeignServer> getForeignServers(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return foreignServerCache.getAllObjects(monitor, this);
    }

    @Association
    public Collection<PostgreLanguage> getLanguages(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return languageCache.getAllObjects(monitor, this);
    }

    // NOTE(review): returns null (not an empty list) when the server type has no encodings.
    @Association
    public Collection<PostgreCharset> getEncodings(DBRProgressMonitor monitor) throws DBException {
        if (!getDataSource().getServerType().supportsEncodings()) {
            return null;
        }
        checkInstanceConnection(monitor);
        return encodingCache.getAllObjects(monitor, this);
    }

    ///////////////////////////////////////////////
    // Data types

    @Override
    public DBPDataKind resolveDataKind(String typeName, int typeID) {
        // Delegated to the data source.
        return dataSource.resolveDataKind(typeName, typeID);
    }

    @Override
    public DBSDataType resolveDataType(DBRProgressMonitor monitor, String typeFullName) throws DBException {
        return dataSource.resolveDataType(monitor, typeFullName);
    }

    @Override
    public Collection<PostgreDataType> getLocalDataTypes() {
        // Built-in types live in the pg_catalog schema; null if it is not cached yet.
        final PostgreSchema schema = getCatalogSchema();
        if (schema != null) {
            return schema.dataTypeCache.getCachedObjects();
        }
        return null;
    }

    @Override
    public PostgreDataType getLocalDataType(String typeName) {
        return getDataType(new VoidProgressMonitor(), typeName);
    }

    @Override
    public DBSDataType getLocalDataType(int typeID) {
        return getDataType(new VoidProgressMonitor(), typeID);
    }

    @Override
    public String getDefaultDataTypeName(@NotNull DBPDataKind dataKind) {
        return PostgreUtils.getDefaultDataTypeName(dataKind);
    }

    ///////////////////////////////////////////////
    // Tablespaces

    @Association
    public Collection<PostgreTablespace> getTablespaces(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return tablespaceCache.getAllObjects(monitor, this);
    }

    @Property(editable = true, updatable = true, order = 4, listProvider = TablespaceListProvider.class)
    public PostgreTablespace getDefaultTablespace(DBRProgressMonitor monitor) throws DBException {
        checkInstanceConnection(monitor);
        return PostgreUtils.getObjectById(monitor, tablespaceCache, this, tablespaceId);
    }

    public void setDefaultTablespace(PostgreTablespace tablespace) throws DBException {
        this.tablespaceId = tablespace.getObjectId();
    }

    // Linear scan by OID; the tablespace cache is keyed by name.
    public PostgreTablespace getTablespace(DBRProgressMonitor monitor, long tablespaceId) throws DBException {
        checkInstanceConnection(monitor);
        for (PostgreTablespace ts : tablespaceCache.getAllObjects(monitor, this)) {
            if (ts.getObjectId() == tablespaceId) {
                return ts;
            }
        }
        return null;
    }

    ///////////////////////////////////////////////
    // Object container

    @Association
    public Collection<PostgreSchema> getSchemas(DBRProgressMonitor monitor) throws DBException {
checkInstanceConnection(monitor);
        // Get all schemas
        return schemaCache.getAllObjects(monitor, this);
    }

    @Nullable
    public PostgreSchema getCatalogSchema(DBRProgressMonitor monitor) throws DBException {
        return getSchema(monitor, PostgreConstants.CATALOG_SCHEMA_NAME);
    }

    // Cache-only lookup of pg_catalog (no database round-trip).
    @Nullable
    PostgreSchema getCatalogSchema() {
        return schemaCache.getCachedObject(PostgreConstants.CATALOG_SCHEMA_NAME);
    }

    void cacheDataTypes(DBRProgressMonitor monitor, boolean forceRefresh) throws DBException {
        if (dataTypeCache.isEmpty() || forceRefresh) {
            dataTypeCache.clear();
            // Cache data types
            for (final PostgreSchema pgSchema : getSchemas(monitor)) {
                if (PostgreConstants.CATALOG_SCHEMA_NAME.equals(pgSchema.getName())) {
                    pgSchema.getDataTypes(monitor);
                }
            }
        }
    }

    public PostgreSchema getSchema(DBRProgressMonitor monitor, String name) throws DBException {
        checkInstanceConnection(monitor);
        return schemaCache.getObject(monitor, this, name);
    }

    // Linear scan by OID; the schema cache is keyed by name.
    public PostgreSchema getSchema(DBRProgressMonitor monitor, long oid) throws DBException {
        checkInstanceConnection(monitor);
        for (PostgreSchema schema : schemaCache.getAllObjects(monitor, this)) {
            if (schema.getObjectId() == oid) {
                return schema;
            }
        }
        return null;
    }

    PostgreTableBase findTable(DBRProgressMonitor monitor, long schemaId, long tableId) throws DBException {
        PostgreSchema schema = getSchema(monitor, schemaId);
        if (schema == null) {
            log.error("Catalog " + schemaId + " not found");
            return null;
        }
        return schema.getTable(monitor, tableId);
    }

    @Override
    public Collection<? extends DBSObject> getChildren(@NotNull DBRProgressMonitor monitor) throws DBException {
        return getSchemas(monitor);
    }

    @Override
    public DBSObject getChild(@NotNull DBRProgressMonitor monitor, @NotNull String childName) throws DBException {
        return getSchema(monitor, childName);
    }

    @Override
    public Class<? extends DBSObject> getChildType(@NotNull DBRProgressMonitor monitor) throws DBException {
        return PostgreSchema.class;
    }

    @Override
    public void cacheStructure(@NotNull DBRProgressMonitor monitor, int scope) throws DBException {
        // Nothing to pre-cache at the database level.
    }

    @NotNull
    @Override
    public DBSObjectState getObjectState() {
        // Only the default (connected) instance is shown as available.
        if (this == dataSource.getDefaultInstance()) {
            return DBSObjectState.NORMAL;
        } else {
            return PostgreConstants.STATE_UNAVAILABLE;
        }
    }

    @Override
    public void refreshObjectState(@NotNull DBRProgressMonitor monitor) throws DBCException {
    }

    @Override
    public DBSObject refreshObject(@NotNull DBRProgressMonitor monitor) throws DBException {
        if (oid == 0) {
            // New database
            readDatabaseInfo(monitor);
            return this;
        } else {
            // Refresh all properties
            PostgreDatabase refDatabase = dataSource.getDatabaseCache().refreshObject(monitor, dataSource, this);
            if (refDatabase != null && refDatabase == dataSource.getDefaultInstance()) {
                // Cache types
                refDatabase.cacheDataTypes(monitor, true);
            }
            return refDatabase;
        }
    }

    public Collection<PostgreRole> getUsers(DBRProgressMonitor monitor) throws DBException {
        if (!getDataSource().getServerType().supportsRoles()) {
            return Collections.emptyList();
        }
        checkInstanceConnection(monitor);
        return roleCache.getAllObjects(monitor, this);
    }

    ////////////////////////////////////////////////////
    // Default schema and search path

    public String getActiveUser() {
        return activeUser;
    }

    public String getActiveSchemaName() {
        return activeSchemaName;
    }

    public void setActiveSchemaName(String activeSchemaName) {
        this.activeSchemaName = activeSchemaName;
    }

    public List<String> getSearchPath() {
        return searchPath;
    }

    List<String> getDefaultSearchPath() {
        return defaultSearchPath;
    }

    // Replaces the in-memory path with the given schema, keeping the session user as fallback.
    public void setSearchPath(String path) {
        searchPath.clear();
        searchPath.add(path);
        if (!path.equals(activeUser)) {
            // NOTE(review): if activeUser is still null this appends null — confirm callers
            // only invoke this after determineDefaultObjects() has run.
            searchPath.add(activeUser);
        }
    }

    // Queries the server for the current schema, session user and search_path.
    private void determineDefaultObjects(JDBCSession session) throws DBCException, SQLException {
        try (JDBCPreparedStatement stat = session.prepareStatement("SELECT current_schema(),session_user")) {
            try (JDBCResultSet rs = stat.executeQuery()) {
                if (rs.nextRow()) {
                    activeSchemaName = JDBCUtils.safeGetString(rs, 1);
                    activeUser = JDBCUtils.safeGetString(rs, 2);
                }
            }
        }
        String searchPathStr = JDBCUtils.queryString(session, "SHOW search_path");
        this.searchPath.clear();
        if (searchPathStr != null) {
            for (String str : searchPathStr.split(",")) {
                str = str.trim();
                this.searchPath.add(DBUtils.getUnQuotedIdentifier(getDataSource(), str));
            }
        } else {
            this.searchPath.add(PostgreConstants.PUBLIC_SCHEMA_NAME);
        }
        // Remember the server-side default so later changes can be layered on top of it.
        defaultSearchPath = new ArrayList<>(searchPath);
    }

    @Override
    public boolean supportsDefaultChange() {
        return true;
    }

    @Nullable
    @Override
    public PostgreSchema getDefaultObject() {
        return schemaCache.getCachedObject(activeSchemaName);
    }

    @Override
    public void setDefaultObject(@NotNull DBRProgressMonitor monitor, @NotNull DBSObject object) throws DBException {
        if (object instanceof PostgreSchema) {
            PostgreSchema oldActive = getDefaultObject();
            if (oldActive == object) {
                return;
            }
            // Apply the new search path on every open JDBC context.
            for (JDBCExecutionContext context : getAllContexts()) {
                setSearchPath(monitor, (PostgreSchema) object, context);
            }
            activeSchemaName = object.getName();
            setSearchPath(object.getName());
            if (oldActive != null) {
                DBUtils.fireObjectSelect(oldActive, false);
            }
            DBUtils.fireObjectSelect(object, true);
        }
    }

    @Override
    public boolean refreshDefaultObject(@NotNull DBCSession session) throws DBException {
        try {
            String oldDefSchema = activeSchemaName;
            determineDefaultObjects((JDBCSession) session);
            if (activeSchemaName != null && !CommonUtils.equalObjects(oldDefSchema, activeSchemaName)) {
                final PostgreSchema newSchema = getSchema(session.getProgressMonitor(), activeSchemaName);
                if (newSchema != null) {
                    setDefaultObject(session.getProgressMonitor(), newSchema);
                    return true;
                }
            }
            return false;
        } catch (SQLException e) {
            throw new DBException(e, getDataSource());
        }
    }

    void setSearchPath(DBRProgressMonitor monitor, PostgreSchema schema, JDBCExecutionContext context) throws
DBCException {
        // Construct search path from current search path but put default schema first
        List<String> newSearchPath = new ArrayList<>(getDefaultSearchPath());
        {
            String defSchemaName = schema.getName();
            int schemaIndex = newSearchPath.indexOf(defSchemaName);
            if (schemaIndex == 0) {
                // Already default schema
            } else {
                if (schemaIndex > 0) {
                    // Remove from previous position
                    newSearchPath.remove(schemaIndex);
                }
                // Add it first
                newSearchPath.add(0, defSchemaName);
            }
        }
        StringBuilder spString = new StringBuilder();
        for (String sp : newSearchPath) {
            if (spString.length() > 0) spString.append(",");
            spString.append(DBUtils.getQuotedIdentifier(getDataSource(), sp));
        }
        try (JDBCSession session = context.openSession(monitor, DBCExecutionPurpose.UTIL, "Change search path")) {
            JDBCUtils.executeSQL(session, "SET search_path = " + spString);
        } catch (SQLException e) {
            throw new DBCException("Error setting search path", e, dataSource);
        }
    }

    /////////////////////////////////////////////////
    // Procedures

    public PostgreProcedure getProcedure(DBRProgressMonitor monitor, long schemaId, long procId) throws DBException {
        final PostgreSchema schema = getSchema(monitor, schemaId);
        if (schema != null) {
            return PostgreUtils.getObjectById(monitor, schema.getProceduresCache(), schema, procId);
        }
        return null;
    }

    // Schema unknown: scan every schema's procedure cache for the OID.
    public PostgreProcedure getProcedure(DBRProgressMonitor monitor, long procId) throws DBException {
        for (final PostgreSchema schema : getSchemas(monitor)) {
            PostgreProcedure procedure = PostgreUtils.getObjectById(monitor, schema.getProceduresCache(), schema, procId);
            if (procedure != null) {
                return procedure;
            }
        }
        return null;
    }

    public PostgreDataType getDataType(DBRProgressMonitor monitor, long typeId) {
        if (typeId <= 0) {
            return null;
        }
        PostgreDataType dataType = dataTypeCache.get(typeId);
        if (dataType != null) {
            return dataType;
        }
        // Not in the OID map: check each schema's type cache and memoize on hit.
        for (PostgreSchema schema : schemaCache.getCachedObjects()) {
            dataType = schema.dataTypeCache.getDataType(typeId);
            if (dataType != null) {
                dataTypeCache.put(typeId, dataType);
                return dataType;
            }
        }
        // Type not found. Let's resolve it
        try {
            dataType = PostgreDataTypeCache.resolveDataType(monitor, this, typeId);
            dataType.getParentObject().dataTypeCache.cacheObject(dataType);
            return dataType;
        } catch (Exception e) {
            log.debug("Can't resolve data type " + typeId, e);
            return null;
        }
    }

    // Lookup order: pg_catalog, then schemas on the search path, then all other cached
    // schemas, finally a server round-trip (skipped when monitor is null).
    public PostgreDataType getDataType(@Nullable DBRProgressMonitor monitor, String typeName) {
        if (typeName.endsWith("[]")) {
            // In some cases ResultSetMetadata returns it as []
            typeName = "_" + typeName.substring(0, typeName.length() - 2);
        }
        {
            // First check system catalog
            final PostgreSchema schema = getCatalogSchema();
            if (schema != null) {
                final PostgreDataType dataType = schema.dataTypeCache.getCachedObject(typeName);
                if (dataType != null) {
                    return dataType;
                }
            }
        }

        // Check schemas in search path
        for (String schemaName : searchPath) {
            final PostgreSchema schema = schemaCache.getCachedObject(schemaName);
            if (schema != null) {
                final PostgreDataType dataType = schema.dataTypeCache.getCachedObject(typeName);
                if (dataType != null) {
                    return dataType;
                }
            }
        }
        // Check the rest
        for (PostgreSchema schema : schemaCache.getCachedObjects()) {
            if (searchPath.contains(schema.getName())) {
                continue;
            }
            final PostgreDataType dataType = schema.dataTypeCache.getCachedObject(typeName);
            if (dataType != null) {
                return dataType;
            }
        }

        if (monitor == null) {
            return null;
        }
        // Type not found. Let's resolve it
        try {
            PostgreDataType dataType = PostgreDataTypeCache.resolveDataType(monitor, this, typeName);
            dataType.getParentObject().dataTypeCache.cacheObject(dataType);
            return dataType;
        } catch (Exception e) {
            log.debug("Can't resolve data type " + typeName, e);
            return null;
        }
    }

    @Override
    public String toString() {
        return name;
    }

    /////////////////////////////////////////////////////////////////////////////////////
    // Caches

    class RoleCache extends JDBCObjectCache<PostgreDatabase, PostgreRole> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT a.oid,a.* FROM pg_catalog.pg_roles a " +
                    "\nORDER BY a.oid"
            );
        }

        @Override
        protected PostgreRole fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreRole(owner, dbResult);
        }

        @Override
        protected boolean handleCacheReadError(DBException error) {
            // #271, #501: in some databases (AWS?) pg_authid is not accessible
            // FIXME: maybe some better workaround?
if (PostgreConstants.EC_PERMISSION_DENIED.equals(error.getDatabaseState())) {
                log.warn(error);
                // Pretend the read succeeded but returned nothing.
                setCache(Collections.emptyList());
                return true;
            }
            return false;
        }
    }

    class AccessMethodCache extends JDBCObjectCache<PostgreDatabase, PostgreAccessMethod> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT am.oid,am.* FROM pg_catalog.pg_am am " +
                    "\nORDER BY am.oid"
            );
        }

        @Override
        protected PostgreAccessMethod fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreAccessMethod(owner, dbResult);
        }
    }

    // Encodings are derived from pg_conversion's distinct target encodings.
    class EncodingCache extends JDBCObjectCache<PostgreDatabase, PostgreCharset> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT c.contoencoding as encid,pg_catalog.pg_encoding_to_char(c.contoencoding) as encname\n" +
                    "FROM pg_catalog.pg_conversion c\n" +
                    "GROUP BY c.contoencoding\n" +
                    "ORDER BY 2\n"
            );
        }

        @Override
        protected PostgreCharset fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreCharset(owner, dbResult);
        }
    }

    class LanguageCache extends JDBCObjectCache<PostgreDatabase, PostgreLanguage> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT l.oid,l.* FROM pg_catalog.pg_language l " +
                    "\nORDER BY l.oid"
            );
        }

        @Override
        protected PostgreLanguage fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreLanguage(owner, dbResult);
        }
    }

    class ForeignDataWrapperCache extends JDBCObjectCache<PostgreDatabase, PostgreForeignDataWrapper> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT l.oid,l.*,p.pronamespace as handler_schema_id " +
                    "\nFROM pg_catalog.pg_foreign_data_wrapper l" +
                    "\nLEFT OUTER JOIN pg_catalog.pg_proc p ON p.oid=l.fdwhandler " +
                    "\nORDER BY l.fdwname"
            );
        }

        @Override
        protected PostgreForeignDataWrapper fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreForeignDataWrapper(owner, dbResult);
        }
    }

    class ForeignServerCache extends JDBCObjectCache<PostgreDatabase, PostgreForeignServer> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT l.oid,l.* FROM pg_catalog.pg_foreign_server l" +
                    "\nORDER BY l.srvname"
            );
        }

        @Override
        protected PostgreForeignServer fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreForeignServer(owner, dbResult);
        }
    }

    class TablespaceCache extends JDBCObjectCache<PostgreDatabase, PostgreTablespace> {
        @Override
        protected JDBCStatement prepareObjectsStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase owner) throws SQLException {
            return session.prepareStatement(
                "SELECT t.oid,t.* FROM pg_catalog.pg_tablespace t " +
                    "\nORDER BY t.oid"
            );
        }

        @Override
        protected PostgreTablespace fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet dbResult) throws SQLException, DBException {
            return new PostgreTablespace(owner, dbResult);
        }
    }

    public static class SchemaCache extends JDBCObjectLookupCache<PostgreDatabase, PostgreSchema> {
        @NotNull
        @Override
        public JDBCStatement
prepareLookupStatement(@NotNull JDBCSession session, @NotNull PostgreDatabase database, @Nullable PostgreSchema object, @Nullable String objectName) throws SQLException { StringBuilder catalogQuery = new StringBuilder( "SELECT n.oid,n.*,d.description FROM pg_catalog.pg_namespace n\n" + "LEFT OUTER JOIN pg_catalog.pg_description d ON d.objoid=n.oid\n"); DBSObjectFilter catalogFilters = database.getDataSource().getContainer().getObjectFilter(PostgreSchema.class, null, false); if ((catalogFilters != null && !catalogFilters.isNotApplicable()) || object != null || objectName != null) { if (object != null || objectName != null) { catalogFilters = new DBSObjectFilter(); catalogFilters.addInclude(object != null ? object.getName() : objectName); } else { catalogFilters = new DBSObjectFilter(catalogFilters); // Always read catalog schema catalogFilters.addInclude(PostgreConstants.CATALOG_SCHEMA_NAME); } JDBCUtils.appendFilterClause(catalogQuery, catalogFilters, "nspname", true); } catalogQuery.append(" ORDER BY nspname"); JDBCPreparedStatement dbStat = session.prepareStatement(catalogQuery.toString()); if (catalogFilters != null) { JDBCUtils.setFilterParameters(dbStat, 1, catalogFilters); } return dbStat; } @Override protected PostgreSchema fetchObject(@NotNull JDBCSession session, @NotNull PostgreDatabase owner, @NotNull JDBCResultSet resultSet) throws SQLException, DBException { String name = JDBCUtils.safeGetString(resultSet, "nspname"); if (name == null) { return null; } if (PostgreSchema.isUtilitySchema(name) && !owner.getDataSource().getContainer().isShowUtilityObjects()) { return null; } return new PostgreSchema(owner, name, resultSet); } } public static class TablespaceListProvider implements IPropertyValueListProvider<PostgreDatabase> { @Override public boolean allowCustomValue() { return false; } @Override public Object[] getPossibleValues(PostgreDatabase object) { try { Collection<PostgreTablespace> tablespaces = object.getTablespaces(new VoidProgressMonitor()); 
return tablespaces.toArray(new Object[tablespaces.size()]); } catch (DBException e) { log.error(e); return new Object[0]; } } } public static class RoleListProvider implements IPropertyValueListProvider<PostgreDatabase> { @Override public boolean allowCustomValue() { return false; } @Override public Object[] getPossibleValues(PostgreDatabase object) { try { Collection<PostgreRole> roles = object.getAuthIds(new VoidProgressMonitor()); return roles.toArray(new Object[roles.size()]); } catch (DBException e) { log.error(e); return new Object[0]; } } } public static class CharsetListProvider implements IPropertyValueListProvider<PostgreDatabase> { @Override public boolean allowCustomValue() { return false; } @Override public Object[] getPossibleValues(PostgreDatabase object) { try { Collection<PostgreCharset> tablespaces = object.getEncodings(new VoidProgressMonitor()); return tablespaces.toArray(new Object[tablespaces.size()]); } catch (DBException e) { log.error(e); return new Object[0]; } } } }
/* * Copyright (c) 2009 University of Durham, England All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * Redistributions in binary * form must reproduce the above copyright notice, this list of conditions and * the following disclaimer in the documentation and/or other materials provided * with the distribution. * Neither the name of 'SynergyNet' nor the names of * its contributors may be used to endorse or promote products derived from this * software without specific prior written permission. THIS SOFTWARE IS PROVIDED * BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package synergynetframework.appsystem.table.appregistry;

import java.io.IOException;
import java.io.InputStream;
import java.util.Iterator;
import java.util.logging.Logger;

import javax.xml.namespace.NamespaceContext;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;

import synergynetframework.appsystem.Resources;

/**
 * Reads SynergyNet application registry information from schema-validated XML.
 * <p>
 * The table configuration XML lists applications (with {@code configpath},
 * {@code enabled} and {@code default} attributes); each enabled application's
 * own configuration XML is then parsed and registered. An application config
 * may declare a client, controller and/or projector component; which one is
 * registered depends on {@link DesktopTypeXMLReader#tableMode}.
 */
public class ApplicationRegistryXMLReader {

	/** The Constant log. */
	private static final Logger log = Logger
			.getLogger(ApplicationRegistryXMLReader.class.getName());

	/** Namespace URI bound to prefix {@code ac} in application config documents. */
	private static final String APP_CONFIG_NS = "http://tel.dur.ac.uk/xml/schemas/synergynetappconfig";

	/** Namespace URI bound to prefix {@code tns} in table config documents. */
	private static final String TABLE_CONFIG_NS = "http://tel.dur.ac.uk/xml/schemas/tableconfiguration";

	/** JAXP attribute selecting the schema language used for validation. */
	private static final String JAXP_SCHEMA_LANGUAGE = "http://java.sun.com/xml/jaxp/properties/schemaLanguage";

	/** JAXP attribute pointing at the schema document used for validation. */
	private static final String JAXP_SCHEMA_SOURCE = "http://java.sun.com/xml/jaxp/properties/schemaSource";

	/**
	 * Gets the client application description from an application config
	 * document, or {@code null} if it declares no client component.
	 *
	 * @param path the XPath (namespace context must map {@code ac})
	 * @param document the parsed application config document
	 * @return the client application info, or {@code null}
	 * @throws XPathExpressionException on an invalid XPath evaluation
	 * @throws DOMException on DOM access failure
	 * @throws ClassNotFoundException if the declared class cannot be resolved
	 */
	public static ApplicationInfo getClientApplication(XPath path,
			Document document) throws XPathExpressionException, DOMException,
			ClassNotFoundException {
		ApplicationInfo info = readComponentInfo(path, document, "clientcomponent");
		if (info != null) {
			info.setApplicationType(ApplicationInfo.APPLICATION_TYPE_CLIENT);
		}
		return info;
	}

	/**
	 * Gets the controller application description from an application config
	 * document, or {@code null} if it declares no controller component.
	 *
	 * @param path the XPath (namespace context must map {@code ac})
	 * @param document the parsed application config document
	 * @return the controller application info, or {@code null}
	 * @throws XPathExpressionException on an invalid XPath evaluation
	 * @throws DOMException on DOM access failure
	 * @throws ClassNotFoundException if the declared class cannot be resolved
	 */
	private static ApplicationInfo getControllerApplication(XPath path,
			Document document) throws XPathExpressionException, DOMException,
			ClassNotFoundException {
		ApplicationInfo info = readComponentInfo(path, document, "controllercomponent");
		if (info != null) {
			info.setApplicationType(ApplicationInfo.APPLICATION_TYPE_CONTROLLER);
		}
		return info;
	}

	/**
	 * Gets the projector application description from an application config
	 * document, or {@code null} if it declares no projector component.
	 *
	 * @param path the XPath (namespace context must map {@code ac})
	 * @param document the parsed application config document
	 * @return the projector application info, or {@code null}
	 * @throws XPathExpressionException on an invalid XPath evaluation
	 * @throws DOMException on DOM access failure
	 * @throws ClassNotFoundException if the declared class cannot be resolved
	 */
	public static ApplicationInfo getProjectorApplication(XPath path,
			Document document) throws XPathExpressionException, DOMException,
			ClassNotFoundException {
		ApplicationInfo info = readComponentInfo(path, document, "projectorcomponent");
		if (info != null) {
			info.setApplicationType(ApplicationInfo.APPLICATION_TYPE_PROJECTOR);
		}
		return info;
	}

	/**
	 * Shared reader for the three component kinds (client/controller/projector),
	 * which are structurally identical apart from their element name. The caller
	 * is responsible for setting the application type on the result.
	 *
	 * @param path the XPath (namespace context must map {@code ac})
	 * @param document the parsed application config document
	 * @param componentElement the component element local name, e.g. {@code "clientcomponent"}
	 * @return the populated info, or {@code null} if the element is absent
	 * @throws XPathExpressionException on an invalid XPath evaluation
	 * @throws DOMException on DOM access failure
	 * @throws ClassNotFoundException if the declared class cannot be resolved
	 */
	private static ApplicationInfo readComponentInfo(XPath path,
			Document document, String componentElement)
			throws XPathExpressionException, DOMException,
			ClassNotFoundException {
		Node component = (Node) path.evaluate("/ac:application/ac:"
				+ componentElement, document, XPathConstants.NODE);
		if (component == null) {
			return null;
		}
		Node reactivatePolicy = (Node) path.evaluate("/ac:application/ac:"
				+ componentElement + "/ac:reactivatepolicy", document,
				XPathConstants.NODE);
		String classname = component.getAttributes().getNamedItem("classname")
				.getTextContent();
		Node appInfo = (Node) path.evaluate("/ac:application/ac:info",
				document, XPathConstants.NODE);
		String appName = appInfo.getAttributes().getNamedItem("name")
				.getTextContent();
		String uuid = appInfo.getAttributes().getNamedItem("uuid")
				.getTextContent();
		boolean showIcon = Boolean.parseBoolean(component.getAttributes()
				.getNamedItem("showicon").getTextContent());
		String versionString = appInfo.getAttributes().getNamedItem("version")
				.getTextContent();
		ApplicationInfo info = new ApplicationInfo(classname, appName,
				versionString, reactivatePolicy.getTextContent());
		info.setUUID(uuid);
		if (showIcon) {
			// iconresource is only required/read when showicon="true".
			String iconresource = component.getAttributes()
					.getNamedItem("iconresource").getTextContent();
			info.setIconResource(iconresource);
			info.setShowIcon(true);
		} else {
			info.setShowIcon(false);
		}
		return info;
	}

	/**
	 * Creates a namespace-aware, schema-validating document builder whose error
	 * handler reports validation problems to stdout without aborting the parse
	 * (only the parser's own fatal errors abort).
	 *
	 * @param schemaResource resource path of the XSD used for validation
	 * @return the configured builder
	 * @throws ParserConfigurationException if the factory cannot be configured
	 * @throws IOException if the schema resource cannot be located
	 */
	private static DocumentBuilder createValidatingBuilder(String schemaResource)
			throws ParserConfigurationException, IOException {
		DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
		factory.setNamespaceAware(true);
		factory.setValidating(true);
		factory.setAttribute(JAXP_SCHEMA_LANGUAGE,
				"http://www.w3.org/2001/XMLSchema");
		factory.setAttribute(JAXP_SCHEMA_SOURCE,
				Resources.getResource(schemaResource).toString());
		DocumentBuilder builder = factory.newDocumentBuilder();
		builder.setErrorHandler(new ErrorHandler() {
			public void error(SAXParseException exception) throws SAXException {
				System.out.println("Error: " + exception.getMessage());
			}

			public void fatalError(SAXParseException exception)
					throws SAXException {
				System.out.println("Fatal error: " + exception.getMessage());
			}

			public void warning(SAXParseException exception)
					throws SAXException {
				System.out.println("Warning: " + exception.getMessage());
			}
		});
		return builder;
	}

	/**
	 * Creates a minimal namespace context mapping exactly one prefix to one
	 * URI, sufficient for the XPath expressions used in this class. Reverse
	 * lookups are unsupported and return {@code null}.
	 *
	 * @param prefix the XML namespace prefix
	 * @param uri the namespace URI bound to that prefix
	 * @return the namespace context
	 */
	private static NamespaceContext singlePrefixContext(final String prefix,
			final String uri) {
		return new NamespaceContext() {
			public String getNamespaceURI(String p) {
				if (p.equals(prefix)) {
					return uri;
				}
				return null;
			}

			public String getPrefix(String u) {
				return null;
			}

			public Iterator<?> getPrefixes(String val) {
				return null;
			}
		};
	}

	/**
	 * Registers an application and optionally marks it as the default.
	 *
	 * @param info the application info to register
	 * @param registry the registry
	 * @param isDefault whether this application becomes the default
	 * @throws InstantiationException propagated from the registry
	 * @throws IllegalAccessException propagated from the registry
	 * @throws ClassNotFoundException propagated from the registry
	 */
	private static void registerApplication(ApplicationInfo info,
			ApplicationRegistry registry, boolean isDefault)
			throws InstantiationException, IllegalAccessException,
			ClassNotFoundException {
		registry.register(info);
		if (isDefault) {
			registry.setDefault(info.getTheClassName());
		}
	}

	/**
	 * Load application configuration.
	 *
	 * Parses one application's config XML (validated against
	 * {@code synergynetapplication.xsd}) and registers the component matching
	 * the current table mode: controller mode prefers the controller component
	 * (falling back to the client component), projector mode uses the
	 * projector component, and any other mode uses the client component.
	 *
	 * @param appConfigXML resource path of the application config XML
	 * @param registry the registry to populate
	 * @param isDefault whether this application becomes the default
	 * @throws SAXException the SAX exception
	 * @throws IOException Signals that an I/O exception has occurred.
	 * @throws ParserConfigurationException the parser configuration exception
	 * @throws XPathExpressionException the x path expression exception
	 * @throws InstantiationException the instantiation exception
	 * @throws IllegalAccessException the illegal access exception
	 * @throws ClassNotFoundException the class not found exception
	 */
	private static void loadApplicationConfiguration(String appConfigXML,
			ApplicationRegistry registry, boolean isDefault)
			throws SAXException, IOException, ParserConfigurationException,
			XPathExpressionException, InstantiationException,
			IllegalAccessException, ClassNotFoundException {
		log.info("Loading Application XML configuration from " + appConfigXML);
		DocumentBuilder builder = createValidatingBuilder("appsetup/schemas/synergynetapplication.xsd");
		Document document = builder.parse(Resources
				.getResourceAsStream(appConfigXML));
		XPath path = XPathFactory.newInstance().newXPath();
		path.setNamespaceContext(singlePrefixContext("ac", APP_CONFIG_NS));

		if (DesktopTypeXMLReader.tableMode
				.equals(DesktopTypeXMLReader.TABLE_MODE_CONTROLLER)) {
			ApplicationInfo infoController = getControllerApplication(path,
					document);
			if (infoController == null) {
				// No controller component declared: fall back to the client one.
				registerApplication(getClientApplication(path, document),
						registry, isDefault);
			} else {
				registerApplication(infoController, registry, isDefault);
			}
		} else if (DesktopTypeXMLReader.tableMode
				.equals(DesktopTypeXMLReader.TABLE_MODE_PROJECTOR)) {
			registerApplication(getProjectorApplication(path, document),
					registry, isDefault);
		} else {
			registerApplication(getClientApplication(path, document),
					registry, isDefault);
		}
	}

	/**
	 * Load from configuration.
	 *
	 * Parses the table configuration XML (validated against
	 * {@code tableconfiguration.xsd}) and loads every listed application whose
	 * {@code enabled} attribute is true.
	 *
	 * @param configXMLInputStream the config xml input stream
	 * @param registry the registry to populate
	 * @throws SAXException the SAX exception
	 * @throws IOException Signals that an I/O exception has occurred.
	 * @throws ParserConfigurationException the parser configuration exception
	 * @throws InstantiationException the instantiation exception
	 * @throws IllegalAccessException the illegal access exception
	 * @throws ClassNotFoundException the class not found exception
	 * @throws XPathExpressionException the x path expression exception
	 */
	public static void loadFromConfiguration(InputStream configXMLInputStream,
			ApplicationRegistry registry) throws SAXException, IOException,
			ParserConfigurationException, InstantiationException,
			IllegalAccessException, ClassNotFoundException,
			XPathExpressionException {
		log.info("Loading Table Configuration XML");
		DocumentBuilder builder = createValidatingBuilder("appsetup/schemas/tableconfiguration.xsd");
		Document document = builder.parse(configXMLInputStream);
		XPath path = XPathFactory.newInstance().newXPath();
		path.setNamespaceContext(singlePrefixContext("tns", TABLE_CONFIG_NS));

		NodeList list = (NodeList) path.evaluate(
				"/tns:config/tns:applications/tns:application", document,
				XPathConstants.NODESET);
		for (int i = 0; i < list.getLength(); i++) {
			Node application = list.item(i);
			String appConfigXML = application.getAttributes()
					.getNamedItem("configpath").getNodeValue();
			boolean enabled = Boolean.parseBoolean(application.getAttributes()
					.getNamedItem("enabled").getNodeValue());
			boolean isDefault = Boolean.parseBoolean(application
					.getAttributes().getNamedItem("default").getNodeValue());
			if (enabled) {
				loadApplicationConfiguration(appConfigXML, registry, isDefault);
			}
		}
	}
}
/*
 * Copyright 2009-2012 The MyBatis Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ibatis.datasource.unpooled;

import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;

import javax.sql.DataSource;

import org.apache.ibatis.io.Resources;
import org.apache.ibatis.logging.LogFactory;

/**
 * A {@link DataSource} that opens a brand-new JDBC connection (via
 * {@link DriverManager}) on every {@link #getConnection()} call — no pooling.
 * The driver class is loaded lazily on first use; login timeout and log writer
 * delegate to {@link DriverManager} and are therefore JVM-global.
 */
public class UnpooledDataSource implements DataSource {

  /** Class loader used to resolve the driver class; null means use Resources. */
  private ClassLoader driverClassLoader;
  /** Extra properties passed verbatim to the driver (merged with user/password). */
  private Properties driverProperties;
  /** True once the driver class has been loaded successfully. */
  private boolean driverInitialized;

  private String driver;
  private String url;
  private String username;
  private String password;

  private boolean autoCommit;
  private Integer defaultTransactionIsolationLevel;

  static {
    // Touch DriverManager so drivers registered via the SPI are loaded early.
    DriverManager.getDrivers();
  }

  public UnpooledDataSource() {
  }

  public UnpooledDataSource(String driver, String url, String username, String password) {
    this.driver = driver;
    this.url = url;
    this.username = username;
    this.password = password;
  }

  public UnpooledDataSource(String driver, String url, Properties driverProperties) {
    this.driver = driver;
    this.url = url;
    this.driverProperties = driverProperties;
  }

  public UnpooledDataSource(ClassLoader driverClassLoader, String driver, String url, String username, String password) {
    this.driverClassLoader = driverClassLoader;
    this.driver = driver;
    this.url = url;
    this.username = username;
    this.password = password;
  }

  public UnpooledDataSource(ClassLoader driverClassLoader, String driver, String url, Properties driverProperties) {
    this.driverClassLoader = driverClassLoader;
    this.driver = driver;
    this.url = url;
    this.driverProperties = driverProperties;
  }

  public Connection getConnection() throws SQLException {
    return doGetConnection(username, password);
  }

  public Connection getConnection(String username, String password) throws SQLException {
    return doGetConnection(username, password);
  }

  /** Delegates to the JVM-global {@link DriverManager} login timeout. */
  public void setLoginTimeout(int loginTimeout) throws SQLException {
    DriverManager.setLoginTimeout(loginTimeout);
  }

  public int getLoginTimeout() throws SQLException {
    return DriverManager.getLoginTimeout();
  }

  /** Delegates to the JVM-global {@link DriverManager} log writer. */
  public void setLogWriter(PrintWriter logWriter) throws SQLException {
    DriverManager.setLogWriter(logWriter);
  }

  public PrintWriter getLogWriter() throws SQLException {
    return DriverManager.getLogWriter();
  }

  public ClassLoader getDriverClassLoader() {
    return driverClassLoader;
  }

  public void setDriverClassLoader(ClassLoader driverClassLoader) {
    this.driverClassLoader = driverClassLoader;
  }

  public Properties getDriverProperties() {
    return driverProperties;
  }

  public void setDriverProperties(Properties driverProperties) {
    this.driverProperties = driverProperties;
  }

  public String getDriver() {
    return driver;
  }

  /**
   * Sets the driver class name and forces re-initialization on next use.
   * Synchronized against {@link #initializeDriver()}.
   */
  public synchronized void setDriver(String driver) {
    this.driver = driver;
    driverInitialized = false;
  }

  public String getUrl() {
    return url;
  }

  public void setUrl(String url) {
    this.url = url;
  }

  public String getUsername() {
    return username;
  }

  public void setUsername(String username) {
    this.username = username;
  }

  public String getPassword() {
    return password;
  }

  public void setPassword(String password) {
    this.password = password;
  }

  public boolean isAutoCommit() {
    return autoCommit;
  }

  public void setAutoCommit(boolean autoCommit) {
    this.autoCommit = autoCommit;
  }

  public Integer getDefaultTransactionIsolationLevel() {
    return defaultTransactionIsolationLevel;
  }

  public void setDefaultTransactionIsolationLevel(Integer defaultTransactionIsolationLevel) {
    this.defaultTransactionIsolationLevel = defaultTransactionIsolationLevel;
  }

  /**
   * Builds the connection properties: driverProperties as defaults, with
   * user/password overriding when non-null.
   */
  private Connection doGetConnection(String username, String password) throws SQLException {
    Properties props = new Properties(driverProperties);
    if (username != null) {
      props.setProperty("user", username);
    }
    if (password != null) {
      props.setProperty("password", password);
    }
    return doGetConnection(props);
  }

  private Connection doGetConnection(Properties properties) throws SQLException {
    initializeDriver();
    Connection connection = DriverManager.getConnection(url, properties);
    configureConnection(connection);
    return connection;
  }

  /**
   * Loads the configured driver class once. On failure the "initialized" flag
   * is left unset so a later call can retry (e.g. after the classpath or
   * driver name has been corrected via {@link #setDriver(String)}).
   *
   * @throws SQLException if the driver class cannot be loaded
   */
  private synchronized void initializeDriver() throws SQLException {
    if (!driverInitialized) {
      try {
        if (driverClassLoader != null) {
          Class.forName(driver, true, driverClassLoader);
        } else {
          Resources.classForName(driver);
        }
      } catch (Exception e) {
        // Chain the original exception so callers can see the real cause,
        // not just its toString().
        throw new SQLException("Error setting driver on UnpooledDataSource. Cause: " + e, e);
      }
      // Mark initialized only after a successful load; the original code set
      // this before the try, which latched a transient failure forever.
      driverInitialized = true;
    }
  }

  /** Applies autoCommit / transaction-isolation settings to a fresh connection. */
  private void configureConnection(Connection conn) throws SQLException {
    if (autoCommit != conn.getAutoCommit()) {
      conn.setAutoCommit(autoCommit);
    }
    if (defaultTransactionIsolationLevel != null) {
      conn.setTransactionIsolation(defaultTransactionIsolationLevel);
    }
  }

  public <T> T unwrap(Class<T> iface) throws SQLException {
    throw new SQLException(getClass().getName() + " is not a wrapper.");
  }

  public boolean isWrapperFor(Class<?> iface) throws SQLException {
    return false;
  }

  public Logger getParentLogger() {
    // requires JDK version 1.6 logger name constant from LogFactory
    return Logger.getLogger(LogFactory.GLOBAL_LOGGER_NAME);
  }

}
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.bigquery.model; /** * Model definition for TrainingOptions. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the BigQuery API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class TrainingOptions extends com.google.api.client.json.GenericJson { /** * Whether to enable auto ARIMA or not. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean autoArima; /** * The max value of non-seasonal p and q. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long autoArimaMaxOrder; /** * Batch size for dnn models. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long batchSize; /** * The data frequency of a time series. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dataFrequency; /** * The column to split data with. 
This column won't be used as a feature. 1. When * data_split_method is CUSTOM, the corresponding column should be boolean. The rows with true * value tag are eval data, and the false are training data. 2. When data_split_method is SEQ, the * first DATA_SPLIT_EVAL_FRACTION rows (from smallest to largest) in the corresponding column are * used as training data, and the rest are eval data. It respects the order in Orderable data * types: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type- * properties * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dataSplitColumn; /** * The fraction of evaluation data over the whole input data. The rest of data will be used as * training data. The format should be double. Accurate to two decimal places. Default value is * 0.2. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double dataSplitEvalFraction; /** * The data split type for training and evaluation, e.g. RANDOM. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String dataSplitMethod; /** * Distance type for clustering models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String distanceType; /** * Dropout probability for dnn models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double dropout; /** * Whether to stop early when the loss doesn't improve significantly any more (compared to * min_relative_progress). Used only for iterative training algorithms. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean earlyStop; /** * Feedback type that specifies which algorithm to run for matrix factorization. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String feedbackType; /** * Hidden units for dnn models. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.util.List<java.lang.Long> hiddenUnits; /** * The geographical region based on which the holidays are considered in time series modeling. If * a valid value is specified, then holiday effects modeling is enabled. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String holidayRegion; /** * The number of periods ahead that need to be forecasted. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long horizon; /** * Include drift when fitting an ARIMA model. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean includeDrift; /** * Specifies the initial learning rate for the line search learn rate strategy. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double initialLearnRate; /** * Name of input label columns in training data. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> inputLabelColumns; /** * Item column specified for matrix factorization models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String itemColumn; /** * The column used to provide the initial centroids for kmeans algorithm when * kmeans_initialization_method is CUSTOM. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kmeansInitializationColumn; /** * The method used to initialize the centroids for kmeans algorithm. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kmeansInitializationMethod; /** * L1 regularization coefficient. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double l1Regularization; /** * L2 regularization coefficient. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.Double l2Regularization; /** * Weights associated with each label class, for rebalancing the training data. Only applicable * for classification models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.Map<String, java.lang.Double> labelClassWeights; /** * Learning rate in training. Used only for iterative training algorithms. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double learnRate; /** * The strategy to determine learn rate for the current iteration. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String learnRateStrategy; /** * Type of loss function used during training run. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String lossType; /** * The maximum number of iterations in training. Used only for iterative training algorithms. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long maxIterations; /** * Maximum depth of a tree for boosted tree models. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long maxTreeDepth; /** * When early_stop is true, stops training when accuracy improvement is less than * 'min_relative_progress'. Used only for iterative training algorithms. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double minRelativeProgress; /** * Minimum split loss for boosted tree models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double minSplitLoss; /** * [Beta] Google Cloud Storage URI from which the model was imported. Only applicable for imported * models. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String modelUri; /** * A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are * the AR order, the degree of differencing, and the MA order. * The value may be {@code null}. */ @com.google.api.client.util.Key private ArimaOrder nonSeasonalOrder; /** * Number of clusters for clustering models. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long numClusters; /** * Num factors specified for matrix factorization models. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.lang.Long numFactors; /** * Optimization strategy for training linear regression models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String optimizationStrategy; /** * Whether to preserve the input structs in output feature names. Suppose there is a struct A with * field b. When false (default), the output feature name is A_b. When true, the output feature * name is A.b. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean preserveInputStructs; /** * Subsample fraction of the training data to grow tree to prevent overfitting for boosted tree * models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double subsample; /** * Column to be designated as time series data for ARIMA model. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String timeSeriesDataColumn; /** * The id column that will be used to indicate different time series to forecast in parallel. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String timeSeriesIdColumn; /** * Column to be designated as time series timestamp for ARIMA model. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String timeSeriesTimestampColumn; /** * User column specified for matrix factorization models. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String userColumn; /** * Hyperparameter for matrix factoration when implicit feedback type is specified. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Double walsAlpha; /** * Whether to train a model from the last checkpoint. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean warmStart; /** * Whether to enable auto ARIMA or not. * @return value or {@code null} for none */ public java.lang.Boolean getAutoArima() { return autoArima; } /** * Whether to enable auto ARIMA or not. * @param autoArima autoArima or {@code null} for none */ public TrainingOptions setAutoArima(java.lang.Boolean autoArima) { this.autoArima = autoArima; return this; } /** * The max value of non-seasonal p and q. * @return value or {@code null} for none */ public java.lang.Long getAutoArimaMaxOrder() { return autoArimaMaxOrder; } /** * The max value of non-seasonal p and q. * @param autoArimaMaxOrder autoArimaMaxOrder or {@code null} for none */ public TrainingOptions setAutoArimaMaxOrder(java.lang.Long autoArimaMaxOrder) { this.autoArimaMaxOrder = autoArimaMaxOrder; return this; } /** * Batch size for dnn models. * @return value or {@code null} for none */ public java.lang.Long getBatchSize() { return batchSize; } /** * Batch size for dnn models. * @param batchSize batchSize or {@code null} for none */ public TrainingOptions setBatchSize(java.lang.Long batchSize) { this.batchSize = batchSize; return this; } /** * The data frequency of a time series. * @return value or {@code null} for none */ public java.lang.String getDataFrequency() { return dataFrequency; } /** * The data frequency of a time series. 
* @param dataFrequency dataFrequency or {@code null} for none */ public TrainingOptions setDataFrequency(java.lang.String dataFrequency) { this.dataFrequency = dataFrequency; return this; } /** * The column to split data with. This column won't be used as a feature. 1. When * data_split_method is CUSTOM, the corresponding column should be boolean. The rows with true * value tag are eval data, and the false are training data. 2. When data_split_method is SEQ, the * first DATA_SPLIT_EVAL_FRACTION rows (from smallest to largest) in the corresponding column are * used as training data, and the rest are eval data. It respects the order in Orderable data * types: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type- * properties * @return value or {@code null} for none */ public java.lang.String getDataSplitColumn() { return dataSplitColumn; } /** * The column to split data with. This column won't be used as a feature. 1. When * data_split_method is CUSTOM, the corresponding column should be boolean. The rows with true * value tag are eval data, and the false are training data. 2. When data_split_method is SEQ, the * first DATA_SPLIT_EVAL_FRACTION rows (from smallest to largest) in the corresponding column are * used as training data, and the rest are eval data. It respects the order in Orderable data * types: https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#data-type- * properties * @param dataSplitColumn dataSplitColumn or {@code null} for none */ public TrainingOptions setDataSplitColumn(java.lang.String dataSplitColumn) { this.dataSplitColumn = dataSplitColumn; return this; } /** * The fraction of evaluation data over the whole input data. The rest of data will be used as * training data. The format should be double. Accurate to two decimal places. Default value is * 0.2. 
* @return value or {@code null} for none */ public java.lang.Double getDataSplitEvalFraction() { return dataSplitEvalFraction; } /** * The fraction of evaluation data over the whole input data. The rest of data will be used as * training data. The format should be double. Accurate to two decimal places. Default value is * 0.2. * @param dataSplitEvalFraction dataSplitEvalFraction or {@code null} for none */ public TrainingOptions setDataSplitEvalFraction(java.lang.Double dataSplitEvalFraction) { this.dataSplitEvalFraction = dataSplitEvalFraction; return this; } /** * The data split type for training and evaluation, e.g. RANDOM. * @return value or {@code null} for none */ public java.lang.String getDataSplitMethod() { return dataSplitMethod; } /** * The data split type for training and evaluation, e.g. RANDOM. * @param dataSplitMethod dataSplitMethod or {@code null} for none */ public TrainingOptions setDataSplitMethod(java.lang.String dataSplitMethod) { this.dataSplitMethod = dataSplitMethod; return this; } /** * Distance type for clustering models. * @return value or {@code null} for none */ public java.lang.String getDistanceType() { return distanceType; } /** * Distance type for clustering models. * @param distanceType distanceType or {@code null} for none */ public TrainingOptions setDistanceType(java.lang.String distanceType) { this.distanceType = distanceType; return this; } /** * Dropout probability for dnn models. * @return value or {@code null} for none */ public java.lang.Double getDropout() { return dropout; } /** * Dropout probability for dnn models. * @param dropout dropout or {@code null} for none */ public TrainingOptions setDropout(java.lang.Double dropout) { this.dropout = dropout; return this; } /** * Whether to stop early when the loss doesn't improve significantly any more (compared to * min_relative_progress). Used only for iterative training algorithms. 
* @return value or {@code null} for none */ public java.lang.Boolean getEarlyStop() { return earlyStop; } /** * Whether to stop early when the loss doesn't improve significantly any more (compared to * min_relative_progress). Used only for iterative training algorithms. * @param earlyStop earlyStop or {@code null} for none */ public TrainingOptions setEarlyStop(java.lang.Boolean earlyStop) { this.earlyStop = earlyStop; return this; } /** * Feedback type that specifies which algorithm to run for matrix factorization. * @return value or {@code null} for none */ public java.lang.String getFeedbackType() { return feedbackType; } /** * Feedback type that specifies which algorithm to run for matrix factorization. * @param feedbackType feedbackType or {@code null} for none */ public TrainingOptions setFeedbackType(java.lang.String feedbackType) { this.feedbackType = feedbackType; return this; } /** * Hidden units for dnn models. * @return value or {@code null} for none */ public java.util.List<java.lang.Long> getHiddenUnits() { return hiddenUnits; } /** * Hidden units for dnn models. * @param hiddenUnits hiddenUnits or {@code null} for none */ public TrainingOptions setHiddenUnits(java.util.List<java.lang.Long> hiddenUnits) { this.hiddenUnits = hiddenUnits; return this; } /** * The geographical region based on which the holidays are considered in time series modeling. If * a valid value is specified, then holiday effects modeling is enabled. * @return value or {@code null} for none */ public java.lang.String getHolidayRegion() { return holidayRegion; } /** * The geographical region based on which the holidays are considered in time series modeling. If * a valid value is specified, then holiday effects modeling is enabled. * @param holidayRegion holidayRegion or {@code null} for none */ public TrainingOptions setHolidayRegion(java.lang.String holidayRegion) { this.holidayRegion = holidayRegion; return this; } /** * The number of periods ahead that need to be forecasted. 
* @return value or {@code null} for none */ public java.lang.Long getHorizon() { return horizon; } /** * The number of periods ahead that need to be forecasted. * @param horizon horizon or {@code null} for none */ public TrainingOptions setHorizon(java.lang.Long horizon) { this.horizon = horizon; return this; } /** * Include drift when fitting an ARIMA model. * @return value or {@code null} for none */ public java.lang.Boolean getIncludeDrift() { return includeDrift; } /** * Include drift when fitting an ARIMA model. * @param includeDrift includeDrift or {@code null} for none */ public TrainingOptions setIncludeDrift(java.lang.Boolean includeDrift) { this.includeDrift = includeDrift; return this; } /** * Specifies the initial learning rate for the line search learn rate strategy. * @return value or {@code null} for none */ public java.lang.Double getInitialLearnRate() { return initialLearnRate; } /** * Specifies the initial learning rate for the line search learn rate strategy. * @param initialLearnRate initialLearnRate or {@code null} for none */ public TrainingOptions setInitialLearnRate(java.lang.Double initialLearnRate) { this.initialLearnRate = initialLearnRate; return this; } /** * Name of input label columns in training data. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getInputLabelColumns() { return inputLabelColumns; } /** * Name of input label columns in training data. * @param inputLabelColumns inputLabelColumns or {@code null} for none */ public TrainingOptions setInputLabelColumns(java.util.List<java.lang.String> inputLabelColumns) { this.inputLabelColumns = inputLabelColumns; return this; } /** * Item column specified for matrix factorization models. * @return value or {@code null} for none */ public java.lang.String getItemColumn() { return itemColumn; } /** * Item column specified for matrix factorization models. 
* @param itemColumn itemColumn or {@code null} for none */ public TrainingOptions setItemColumn(java.lang.String itemColumn) { this.itemColumn = itemColumn; return this; } /** * The column used to provide the initial centroids for kmeans algorithm when * kmeans_initialization_method is CUSTOM. * @return value or {@code null} for none */ public java.lang.String getKmeansInitializationColumn() { return kmeansInitializationColumn; } /** * The column used to provide the initial centroids for kmeans algorithm when * kmeans_initialization_method is CUSTOM. * @param kmeansInitializationColumn kmeansInitializationColumn or {@code null} for none */ public TrainingOptions setKmeansInitializationColumn(java.lang.String kmeansInitializationColumn) { this.kmeansInitializationColumn = kmeansInitializationColumn; return this; } /** * The method used to initialize the centroids for kmeans algorithm. * @return value or {@code null} for none */ public java.lang.String getKmeansInitializationMethod() { return kmeansInitializationMethod; } /** * The method used to initialize the centroids for kmeans algorithm. * @param kmeansInitializationMethod kmeansInitializationMethod or {@code null} for none */ public TrainingOptions setKmeansInitializationMethod(java.lang.String kmeansInitializationMethod) { this.kmeansInitializationMethod = kmeansInitializationMethod; return this; } /** * L1 regularization coefficient. * @return value or {@code null} for none */ public java.lang.Double getL1Regularization() { return l1Regularization; } /** * L1 regularization coefficient. * @param l1Regularization l1Regularization or {@code null} for none */ public TrainingOptions setL1Regularization(java.lang.Double l1Regularization) { this.l1Regularization = l1Regularization; return this; } /** * L2 regularization coefficient. * @return value or {@code null} for none */ public java.lang.Double getL2Regularization() { return l2Regularization; } /** * L2 regularization coefficient. 
* @param l2Regularization l2Regularization or {@code null} for none */ public TrainingOptions setL2Regularization(java.lang.Double l2Regularization) { this.l2Regularization = l2Regularization; return this; } /** * Weights associated with each label class, for rebalancing the training data. Only applicable * for classification models. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.Double> getLabelClassWeights() { return labelClassWeights; } /** * Weights associated with each label class, for rebalancing the training data. Only applicable * for classification models. * @param labelClassWeights labelClassWeights or {@code null} for none */ public TrainingOptions setLabelClassWeights(java.util.Map<String, java.lang.Double> labelClassWeights) { this.labelClassWeights = labelClassWeights; return this; } /** * Learning rate in training. Used only for iterative training algorithms. * @return value or {@code null} for none */ public java.lang.Double getLearnRate() { return learnRate; } /** * Learning rate in training. Used only for iterative training algorithms. * @param learnRate learnRate or {@code null} for none */ public TrainingOptions setLearnRate(java.lang.Double learnRate) { this.learnRate = learnRate; return this; } /** * The strategy to determine learn rate for the current iteration. * @return value or {@code null} for none */ public java.lang.String getLearnRateStrategy() { return learnRateStrategy; } /** * The strategy to determine learn rate for the current iteration. * @param learnRateStrategy learnRateStrategy or {@code null} for none */ public TrainingOptions setLearnRateStrategy(java.lang.String learnRateStrategy) { this.learnRateStrategy = learnRateStrategy; return this; } /** * Type of loss function used during training run. * @return value or {@code null} for none */ public java.lang.String getLossType() { return lossType; } /** * Type of loss function used during training run. 
* @param lossType lossType or {@code null} for none */ public TrainingOptions setLossType(java.lang.String lossType) { this.lossType = lossType; return this; } /** * The maximum number of iterations in training. Used only for iterative training algorithms. * @return value or {@code null} for none */ public java.lang.Long getMaxIterations() { return maxIterations; } /** * The maximum number of iterations in training. Used only for iterative training algorithms. * @param maxIterations maxIterations or {@code null} for none */ public TrainingOptions setMaxIterations(java.lang.Long maxIterations) { this.maxIterations = maxIterations; return this; } /** * Maximum depth of a tree for boosted tree models. * @return value or {@code null} for none */ public java.lang.Long getMaxTreeDepth() { return maxTreeDepth; } /** * Maximum depth of a tree for boosted tree models. * @param maxTreeDepth maxTreeDepth or {@code null} for none */ public TrainingOptions setMaxTreeDepth(java.lang.Long maxTreeDepth) { this.maxTreeDepth = maxTreeDepth; return this; } /** * When early_stop is true, stops training when accuracy improvement is less than * 'min_relative_progress'. Used only for iterative training algorithms. * @return value or {@code null} for none */ public java.lang.Double getMinRelativeProgress() { return minRelativeProgress; } /** * When early_stop is true, stops training when accuracy improvement is less than * 'min_relative_progress'. Used only for iterative training algorithms. * @param minRelativeProgress minRelativeProgress or {@code null} for none */ public TrainingOptions setMinRelativeProgress(java.lang.Double minRelativeProgress) { this.minRelativeProgress = minRelativeProgress; return this; } /** * Minimum split loss for boosted tree models. * @return value or {@code null} for none */ public java.lang.Double getMinSplitLoss() { return minSplitLoss; } /** * Minimum split loss for boosted tree models. 
* @param minSplitLoss minSplitLoss or {@code null} for none */ public TrainingOptions setMinSplitLoss(java.lang.Double minSplitLoss) { this.minSplitLoss = minSplitLoss; return this; } /** * [Beta] Google Cloud Storage URI from which the model was imported. Only applicable for imported * models. * @return value or {@code null} for none */ public java.lang.String getModelUri() { return modelUri; } /** * [Beta] Google Cloud Storage URI from which the model was imported. Only applicable for imported * models. * @param modelUri modelUri or {@code null} for none */ public TrainingOptions setModelUri(java.lang.String modelUri) { this.modelUri = modelUri; return this; } /** * A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are * the AR order, the degree of differencing, and the MA order. * @return value or {@code null} for none */ public ArimaOrder getNonSeasonalOrder() { return nonSeasonalOrder; } /** * A specification of the non-seasonal part of the ARIMA model: the three components (p, d, q) are * the AR order, the degree of differencing, and the MA order. * @param nonSeasonalOrder nonSeasonalOrder or {@code null} for none */ public TrainingOptions setNonSeasonalOrder(ArimaOrder nonSeasonalOrder) { this.nonSeasonalOrder = nonSeasonalOrder; return this; } /** * Number of clusters for clustering models. * @return value or {@code null} for none */ public java.lang.Long getNumClusters() { return numClusters; } /** * Number of clusters for clustering models. * @param numClusters numClusters or {@code null} for none */ public TrainingOptions setNumClusters(java.lang.Long numClusters) { this.numClusters = numClusters; return this; } /** * Num factors specified for matrix factorization models. * @return value or {@code null} for none */ public java.lang.Long getNumFactors() { return numFactors; } /** * Num factors specified for matrix factorization models. 
* @param numFactors numFactors or {@code null} for none */ public TrainingOptions setNumFactors(java.lang.Long numFactors) { this.numFactors = numFactors; return this; } /** * Optimization strategy for training linear regression models. * @return value or {@code null} for none */ public java.lang.String getOptimizationStrategy() { return optimizationStrategy; } /** * Optimization strategy for training linear regression models. * @param optimizationStrategy optimizationStrategy or {@code null} for none */ public TrainingOptions setOptimizationStrategy(java.lang.String optimizationStrategy) { this.optimizationStrategy = optimizationStrategy; return this; } /** * Whether to preserve the input structs in output feature names. Suppose there is a struct A with * field b. When false (default), the output feature name is A_b. When true, the output feature * name is A.b. * @return value or {@code null} for none */ public java.lang.Boolean getPreserveInputStructs() { return preserveInputStructs; } /** * Whether to preserve the input structs in output feature names. Suppose there is a struct A with * field b. When false (default), the output feature name is A_b. When true, the output feature * name is A.b. * @param preserveInputStructs preserveInputStructs or {@code null} for none */ public TrainingOptions setPreserveInputStructs(java.lang.Boolean preserveInputStructs) { this.preserveInputStructs = preserveInputStructs; return this; } /** * Subsample fraction of the training data to grow tree to prevent overfitting for boosted tree * models. * @return value or {@code null} for none */ public java.lang.Double getSubsample() { return subsample; } /** * Subsample fraction of the training data to grow tree to prevent overfitting for boosted tree * models. 
* @param subsample subsample or {@code null} for none */ public TrainingOptions setSubsample(java.lang.Double subsample) { this.subsample = subsample; return this; } /** * Column to be designated as time series data for ARIMA model. * @return value or {@code null} for none */ public java.lang.String getTimeSeriesDataColumn() { return timeSeriesDataColumn; } /** * Column to be designated as time series data for ARIMA model. * @param timeSeriesDataColumn timeSeriesDataColumn or {@code null} for none */ public TrainingOptions setTimeSeriesDataColumn(java.lang.String timeSeriesDataColumn) { this.timeSeriesDataColumn = timeSeriesDataColumn; return this; } /** * The id column that will be used to indicate different time series to forecast in parallel. * @return value or {@code null} for none */ public java.lang.String getTimeSeriesIdColumn() { return timeSeriesIdColumn; } /** * The id column that will be used to indicate different time series to forecast in parallel. * @param timeSeriesIdColumn timeSeriesIdColumn or {@code null} for none */ public TrainingOptions setTimeSeriesIdColumn(java.lang.String timeSeriesIdColumn) { this.timeSeriesIdColumn = timeSeriesIdColumn; return this; } /** * Column to be designated as time series timestamp for ARIMA model. * @return value or {@code null} for none */ public java.lang.String getTimeSeriesTimestampColumn() { return timeSeriesTimestampColumn; } /** * Column to be designated as time series timestamp for ARIMA model. * @param timeSeriesTimestampColumn timeSeriesTimestampColumn or {@code null} for none */ public TrainingOptions setTimeSeriesTimestampColumn(java.lang.String timeSeriesTimestampColumn) { this.timeSeriesTimestampColumn = timeSeriesTimestampColumn; return this; } /** * User column specified for matrix factorization models. * @return value or {@code null} for none */ public java.lang.String getUserColumn() { return userColumn; } /** * User column specified for matrix factorization models. 
* @param userColumn userColumn or {@code null} for none */ public TrainingOptions setUserColumn(java.lang.String userColumn) { this.userColumn = userColumn; return this; } /** * Hyperparameter for matrix factoration when implicit feedback type is specified. * @return value or {@code null} for none */ public java.lang.Double getWalsAlpha() { return walsAlpha; } /** * Hyperparameter for matrix factoration when implicit feedback type is specified. * @param walsAlpha walsAlpha or {@code null} for none */ public TrainingOptions setWalsAlpha(java.lang.Double walsAlpha) { this.walsAlpha = walsAlpha; return this; } /** * Whether to train a model from the last checkpoint. * @return value or {@code null} for none */ public java.lang.Boolean getWarmStart() { return warmStart; } /** * Whether to train a model from the last checkpoint. * @param warmStart warmStart or {@code null} for none */ public TrainingOptions setWarmStart(java.lang.Boolean warmStart) { this.warmStart = warmStart; return this; } @Override public TrainingOptions set(String fieldName, Object value) { return (TrainingOptions) super.set(fieldName, value); } @Override public TrainingOptions clone() { return (TrainingOptions) super.clone(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.io.kudu; import static org.apache.beam.sdk.io.kudu.KuduTestUtils.COL_ID; import static org.apache.beam.sdk.io.kudu.KuduTestUtils.GenerateUpsert; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.mockito.Mockito.withSettings; import java.io.IOException; import java.io.ObjectInputStream; import java.io.Serializable; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.beam.sdk.coders.BigEndianIntegerCoder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.GenerateSequence; import org.apache.beam.sdk.testing.ExpectedLogs; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.testing.TestPipeline; import org.apache.beam.sdk.transforms.Count; import org.apache.beam.sdk.transforms.SerializableFunction; import org.apache.beam.sdk.values.PCollection; import org.apache.kudu.client.KuduException; import org.apache.kudu.client.Operation; import org.apache.kudu.client.RowResult; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import 
org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A test of {@link KuduIO} using fake Kudu services.
 *
 * <p>Since Kudu is written in C++ it does not currently lend itself to easy unit tests from a Java
 * environment. The Kudu project is actively working on a solution for this (see <a
 * href="https://issues.apache.org/jira/browse/KUDU-2411">KUDU-2411</a>) which will be used in the
 * future. In the meantime, only rudimentary tests exist here, with the preferred testing being
 * carried out in {@link KuduIOIT}.
 */
@RunWith(JUnit4.class)
public class KuduIOTest {
  private static final Logger LOG = LoggerFactory.getLogger(KuduIOTest.class);

  // Separate pipelines so the read and write tests cannot interfere with each other.
  @Rule public final TestPipeline writePipeline = TestPipeline.create();
  @Rule public final TestPipeline readPipeline = TestPipeline.create();
  @Rule public ExpectedException thrown = ExpectedException.none();
  // The fakes report their activity via debug logging (see FakeWriter/FakeReader below);
  // these rules capture that output so the tests can verify it.
  @Rule public final transient ExpectedLogs expectedWriteLogs = ExpectedLogs.none(FakeWriter.class);
  @Rule public final transient ExpectedLogs expectedReadLogs = ExpectedLogs.none(FakeReader.class);

  private KuduService<Integer> mockReadService;
  private KuduService<String> mockWriteService;

  private final int numberRecords = 10;
  private int targetParallelism = 3; // determined by the runner, but direct has min of 3

  /**
   * Creates fresh service mocks before each test. The mocks are serializable because Beam
   * serializes the transforms that hold them when the pipeline runs.
   */
  @Before
  public void setUp() throws Exception {
    mockReadService = mock(KuduService.class, withSettings().serializable());
    mockWriteService = mock(KuduService.class, withSettings().serializable());
  }

  /**
   * Tests the read path using a {@link FakeReader}. The {@link KuduService} is mocked to simulate
   * 4 tablets and fake the encoding of a scanner for each tablet.
   * The test verifies that the
   * {@link KuduIO} correctly splits into 4 sources and instantiates a reader for each, and that
   * the correct number of records are read.
   */
  @Test
  public void testRead() throws KuduException {
    when(mockReadService.createReader(any())).thenAnswer(new FakeReaderAnswer());

    // Simulate the equivalent of Kudu providing an encoded scanner per tablet. Here we encode
    // a range which the fake reader will use to simulate a single tablet read.
    // Four ranges of 25 records each, so the pipeline should see 100 records in total.
    List<byte[]> fakeScanners =
        Arrays.asList(
            ByteBuffer.allocate(8).putInt(0).putInt(25).array(),
            ByteBuffer.allocate(8).putInt(25).putInt(50).array(),
            ByteBuffer.allocate(8).putInt(50).putInt(75).array(),
            ByteBuffer.allocate(8).putInt(75).putInt(100).array());
    when(mockReadService.createTabletScanners(any())).thenReturn(fakeScanners);

    PCollection<Integer> output =
        readPipeline.apply(
            KuduIO.<Integer>read()
                .withMasterAddresses("mock")
                .withTable("Table")
                // the fake reader only deals with a single int
                .withParseFn(
                    (SerializableFunction<RowResult, Integer>) input -> input.getInt(COL_ID))
                .withKuduService(mockReadService)
                .withCoder(BigEndianIntegerCoder.of()));
    PAssert.thatSingleton(output.apply("Count", Count.globally())).isEqualTo((long) 100);
    readPipeline.run().waitUntilFinish();

    // check that the fake tablet ranges were read (FakeReader logs each decoded range)
    expectedReadLogs.verifyDebug(String.format(FakeReader.LOG_SET_RANGE, 0, 25));
    expectedReadLogs.verifyDebug(String.format(FakeReader.LOG_SET_RANGE, 25, 50));
    expectedReadLogs.verifyDebug(String.format(FakeReader.LOG_SET_RANGE, 50, 75));
    expectedReadLogs.verifyDebug(String.format(FakeReader.LOG_SET_RANGE, 75, 100));
  }

  /**
   * Test the write path using a {@link FakeWriter} and verifies the expected log statements are
   * written. This test ensures that the {@link KuduIO} correctly respects parallelism by
   * deserializing writers and that each writer is opening and closing Kudu sessions.
   */
  @Test
  // NOTE(review): @Ignore carries no reason string — document why this test is disabled
  // (presumably the log-based verification is unreliable across runners; confirm and record it).
  @Ignore
  public void testWrite() throws Exception {
    when(mockWriteService.createWriter(any())).thenReturn(new FakeWriter());

    writePipeline
        .apply("Generate sequence", GenerateSequence.from(0).to(numberRecords))
        .apply(
            "Write records to Kudu",
            KuduIO.write()
                .withMasterAddresses("ignored")
                .withTable("ignored")
                .withFormatFn(new GenerateUpsert()) // ignored (mocking Operation is pointless)
                .withKuduService(mockWriteService));
    writePipeline.run().waitUntilFinish();

    // Each deserialized writer instance (ids 1..targetParallelism) must have opened a session,
    // written at least once, and closed its session.
    for (int i = 1; i <= targetParallelism; i++) {
      expectedWriteLogs.verifyDebug(String.format(FakeWriter.LOG_OPEN_SESSION, i));
      expectedWriteLogs.verifyDebug(
          String.format(FakeWriter.LOG_WRITE, i)); // at least one per writer
      expectedWriteLogs.verifyDebug(String.format(FakeWriter.LOG_CLOSE_SESSION, i));
    }
    // verify all entries written
    for (int n = 0; n < numberRecords; n++) {
      expectedWriteLogs.verifyDebug(
          String.format(FakeWriter.LOG_WRITE_VALUE, n)); // at least one per writer
    }
  }

  /**
   * A fake writer which logs operations using a unique id for the writer instance. The initial
   * writer is created with an id of 0 and each deserialized instance will receive a unique integer
   * id.
   *
   * <p>This writer allows tests to verify that sessions are opened and closed and the entities are
   * passed to the write operation. However, the {@code formatFn} is ignored as the mocking required
   * to replicate the {@link Operation} would render it a meaningless check.
*/ private static class FakeWriter implements KuduService.Writer<Long> { private static final Logger LOG = LoggerFactory.getLogger(FakeWriter.class); static final String LOG_OPEN_SESSION = "FakeWriter[%d] openSession"; static final String LOG_WRITE = "FakeWriter[%d] write"; static final String LOG_WRITE_VALUE = "FakeWriter value[%d]"; static final String LOG_CLOSE_SESSION = "FakeWriter[%d] closeSession"; // share a counter across instances to uniquely identify the writers private static final AtomicInteger counter = new AtomicInteger(0); private transient int id = 0; // set on deserialization @Override public void openSession() { LOG.debug(String.format(LOG_OPEN_SESSION, id)); } @Override public void write(Long entity) { LOG.debug(String.format(LOG_WRITE, entity)); LOG.debug(String.format(LOG_WRITE_VALUE, entity)); } @Override public void closeSession() { LOG.debug(String.format(LOG_CLOSE_SESSION, id)); } @Override public void close() { // called on teardown which give no guarantees LOG.debug("FakeWriter[{}] closed.", id); } /** Sets the unique id on deserialzation using the shared counter. */ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { in.defaultReadObject(); id = counter.incrementAndGet(); } } /** * A fake reader which will return ascending integers from either 0 to 99 unless or using the * range specified in the serlialized token in the source. This is faking the behavior of the * scanner serialization in Kudu. 
*/ private static class FakeReader extends BoundedSource.BoundedReader<Integer> { private static final Logger LOG = LoggerFactory.getLogger(FakeReader.class); static final String LOG_SET_RANGE = "FakeReader serializedToken gives range %d - %d"; private final KuduIO.KuduSource<Integer> source; private int lowerInclusive = 0; private int upperExclusive = 100; private int current = 0; private RowResult mockRecord = mock(RowResult.class); // simulate a row from Kudu FakeReader(KuduIO.KuduSource<Integer> source) { this.source = source; // any request for an int from the mocked row will return the current value when(mockRecord.getInt(any())).thenAnswer((Answer<Integer>) invocation -> current); } @Override public boolean start() { // simulate the deserialization of a tablet scanner if (source.serializedToken != null) { ByteBuffer bb = ByteBuffer.wrap(source.serializedToken); lowerInclusive = bb.getInt(); upperExclusive = bb.getInt(); LOG.debug(String.format(LOG_SET_RANGE, lowerInclusive, upperExclusive)); } current = lowerInclusive; return true; } @Override public boolean advance() { current++; return current < upperExclusive; } @Override public Integer getCurrent() { return source.spec.getParseFn().apply(mockRecord); } @Override public void close() {} @Override public BoundedSource<Integer> getCurrentSource() { return source; } } // required to be a static class for serialization static class FakeReaderAnswer implements Answer<FakeReader>, Serializable { @Override public FakeReader answer(InvocationOnMock invocation) { Object[] args = invocation.getArguments(); return new FakeReader((KuduIO.KuduSource<Integer>) args[0]); } } }
/* * JBoss, Home of Professional Open Source * Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors * as indicated by the @authors tag. All rights reserved. */ package org.searchisko.contribprofile.provider; import java.io.IOException; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TimeZone; import java.util.logging.Level; import java.util.logging.Logger; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.ejb.LocalBean; import javax.ejb.Stateless; import javax.inject.Inject; import javax.inject.Named; import org.apache.commons.lang.StringUtils; import org.apache.http.HttpEntity; import org.apache.http.auth.AuthenticationException; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.impl.auth.BasicScheme; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.type.TypeReference; import org.elasticsearch.common.settings.SettingsException; import org.searchisko.api.ContentObjectFields; import org.searchisko.api.model.AppConfiguration; import org.searchisko.api.service.ContributorProfileService; import org.searchisko.api.util.SearchUtils; import org.searchisko.contribprofile.model.ContributorProfile; /** * Jive 6 implementation of Contributor Provider. <br/> * Documentation for Jive 6 REST API: https://developers.jivesoftware.com/api/v3/rest/PersonService.html Access to Jive * 6 has to be authenticated. 
See AppConfiguration
 *
 * @author Libor Krzyzanek
 * @author Vlastimil Elias (velias at redhat dot com)
 */
@Named
@Stateless
@LocalBean
public class Jive6ContributorProfileProvider implements ContributorProfileProvider {

	protected static final String DOMAIN_JBOSS_ORG = "jboss.org";
	protected static final String DOMAIN_GOOGLE_COM = "google.com";
	protected static final String DOMAIN_LINKEDIN_COM = "linkedin.com";
	protected static final String DOMAIN_FACEBOOK_COM = "facebook.com";
	protected static final String DOMAIN_GITHUB_COM = "github.com";
	protected static final String DOMAIN_TWITTER_COM = "twitter.com";

	protected static final String HIRE_DATE_KEY = "Hire Date";
	protected static final String LEAVE_DATE_KEY = "Leaving Date";
	protected static final String DATE_PATTERN = "MM/dd/yyyy";

	// Lookup table from English country display name (as returned by Jive) to ISO 3166 country code.
	private static final Map<String, String> countryNameToCode;

	static {
		Map<String, String> tempCountryNameToCode = new HashMap<String, String>();
		for (String isoCountry : Locale.getISOCountries()) {
			tempCountryNameToCode.put(new Locale("", isoCountry).getDisplayCountry(Locale.US), isoCountry);
		}
		countryNameToCode = Collections.unmodifiableMap(tempCountryNameToCode);
	}

	@Inject
	protected Logger log;

	public static final String JIVE_PROFILE_REST_API = "/api/core/v3/people/username/";
	public static final String JIVE_ALL_PROFILES_REST_API = "/api/core/v3/people/?fields=jive,username,name,emails,displayName,tags,updated,resources,thumbnailUrl,published,addresses";

	@Inject
	protected AppConfiguration appConfiguration;

	protected CloseableHttpClient httpClient;

	@PostConstruct
	public void init() {
		httpClient = HttpClients.custom().build();
	}

	@Override
	public List<ContributorProfile> getAllProfiles(Integer start, Integer size) {
		String jive6Url = appConfiguration.getContributorProfileProviderConfig().getUrlbase();
		String url = jive6Url + JIVE_ALL_PROFILES_REST_API;
		url = addPaginationToUrl(url, start, size);
		log.log(Level.FINE, "Get data from Jive using url: {0}", url);
		byte[] data = getData(url);
		if (data == null) {
			return null;
		}
		return convertToProfiles(data);
	}

	/**
	 * Convert raw JSON bytes of a Jive "people" listing into profiles.
	 *
	 * @param data raw JSON response bytes, must contain a top level "list" element
	 * @return list of converted profiles, never null
	 */
	@SuppressWarnings("unchecked")
	protected List<ContributorProfile> convertToProfiles(byte[] data) {
		List<ContributorProfile> ret = new LinkedList<>();
		Map<String, Object> map = convertJSONMap(data);
		List<Map<String, Object>> profiles = (List<Map<String, Object>>) map.get("list");
		for (Map<String, Object> profile : profiles) {
			ContributorProfile contributorProfile = mapRawJsonData(profile);
			ret.add(contributorProfile);
		}
		return ret;
	}

	/**
	 * Append Jive pagination query parameters to the URL. Either parameter may be null to use Jive defaults.
	 */
	protected String addPaginationToUrl(String url, Integer start, Integer size) {
		if (start != null) {
			url += "&startIndex=" + start;
		}
		if (size != null) {
			url += "&count=" + size;
		}
		return url;
	}

	protected byte[] getData(String url) {
		String username = appConfiguration.getContributorProfileProviderConfig().getUsername();
		String password = appConfiguration.getContributorProfileProviderConfig().getPassword();
		return getData(url, username, password);
	}

	/**
	 * Get data from provider using HTTP Basic authentication.
	 *
	 * @param url to GET
	 * @param username for Basic auth
	 * @param password for Basic auth
	 * @return data or null if something goes wrong (missing credentials, non-2xx response, IO error).
	 */
	protected byte[] getData(String url, String username, String password) {
		if (StringUtils.isBlank(username) && StringUtils.isBlank(password)) {
			log.log(Level.SEVERE, "Jive provider configuration has username and password blank.");
			return null;
		}
		HttpGet httpGet = new HttpGet(url);
		CloseableHttpResponse response = null;
		try {
			UsernamePasswordCredentials credentials = new UsernamePasswordCredentials(username, password);
			BasicScheme bs = new BasicScheme();
			// preemptive Basic auth - the Authorization header is added up front instead of waiting for a 401
			httpGet.addHeader(bs.authenticate(credentials, httpGet, null));
			response = httpClient.execute(httpGet);
			HttpEntity entity = response.getEntity();
			if (response.getStatusLine().getStatusCode() >= 300) {
				String output = EntityUtils.toString(entity);
				log.log(Level.WARNING, "Cannot get data from Jive, response: {0}, code: {1}",
						new Object[] { output, response.getStatusLine().getStatusCode() });
				return null;
			}
			byte[] data = EntityUtils.toByteArray(entity);
			if (log.isLoggable(Level.FINEST)) {
				log.log(Level.FINEST, "data from Jive: {0}", new String(data));
			}
			return data;
		} catch (IOException e) {
			// BUG FIX: log message typo corrected ("date" -> "data")
			log.log(Level.SEVERE, "Cannot get data from Jive", e);
			return null;
		} catch (AuthenticationException e) {
			// programming/config error - should not happen with Basic scheme
			throw new RuntimeException(e);
		} finally {
			if (response != null)
				try {
					response.close();
				} catch (IOException e) {
					// ignore - nothing useful can be done about a failure while closing
				}
		}
	}

	@Override
	public ContributorProfile getProfile(String jbossorgUsername) {
		String jive6Url = appConfiguration.getContributorProfileProviderConfig().getUrlbase();
		byte[] data = getData(jive6Url + JIVE_PROFILE_REST_API + jbossorgUsername);
		if (data == null) {
			return null;
		}
		return convertToProfile(data);
	}

	protected ContributorProfile convertToProfile(byte[] data) {
		Map<String, Object> map = convertJSONMap(data);
		return mapRawJsonData(map);
	}

	@PreDestroy
	public void destroy() {
		try {
			httpClient.close();
		} catch (IOException e) {
			log.warning(e.getMessage());
		}
	}

	private static final byte FIRST_RESPONSE_BYTE = "{".getBytes()[0];

	/**
	 * Parse JSON bytes returned by Jive into a Map, skipping Jive's non-JSON guard prefix.
	 *
	 * @param data raw response bytes
	 * @return parsed JSON structure
	 * @throws RuntimeException if the data cannot be parsed
	 */
	protected Map<String, Object> convertJSONMap(byte[] data) {
		ObjectMapper mapper = new ObjectMapper();
		try {
			// next code is used to remove weird first line of JIVE response. Simply we find first { which means begin of JSON
			// data.
			int startOffset = 0;
			for (byte b : data) {
				if (FIRST_RESPONSE_BYTE == b) {
					break;
				}
				startOffset++;
			}
			// TODO CONTRIBUTOR_PROFILE is encoding (UTF-8 or ISO-xx etc.) of profile data from JIVE server handled correctly
			// here?
			// BUG FIX: the third readValue() argument is the number of bytes to read, not an end index, so the
			// skipped prefix length must be subtracted.
			return mapper.readValue(data, startOffset, data.length - startOffset,
					new TypeReference<Map<String, Object>>() {
					});
		} catch (IOException e) {
			String msg = "Cannot parse Jive 6 profile json data: " + e.getMessage();
			log.log(Level.WARNING, msg);
			// BUG FIX: preserve the original cause for easier diagnostics
			throw new RuntimeException(msg, e);
		}
	}

	/**
	 * Map one raw Jive person JSON structure onto a {@link ContributorProfile}.
	 *
	 * @param map parsed JSON of one Jive person
	 * @return converted profile
	 * @throws SettingsException if the primary email is missing in the Jive data
	 */
	@SuppressWarnings("unchecked")
	protected ContributorProfile mapRawJsonData(Map<String, Object> map) {
		Map<String, Object> jiveObject = (Map<String, Object>) map.get("jive");
		Map<String, Object> nameObject = (Map<String, Object>) map.get("name");
		List<Map<String, Object>> emailsObject = (List<Map<String, Object>>) map.get("emails");

		Map<String, List<String>> typeSpecificCodes = new HashMap<>();
		// TODO CONTRIBUTOR_PROFILE Jive6 provider make type_specific_code obtaining from jive data configurable
		addTypeSpecificCode(typeSpecificCodes, ContributorProfileService.FIELD_TSC_JBOSSORG_USERNAME,
				(String) jiveObject.get("username"));
		addTypeSpecificCode(typeSpecificCodes, ContributorProfileService.FIELD_TSC_GITHUB_USERNAME,
				getProfileValue(jiveObject, "github Username"));

		Map<String, Object> profileData = mapProfileData(map, jiveObject);

		String primaryEmail = SearchUtils.trimToNull(getPrimaryEmail(emailsObject));
		if (primaryEmail == null) {
			throw new SettingsException(
					"Jive Contributor Profile primary email is missing, probably due incorrect permissions in Jive.");
		}

		String fullName = (String) nameObject.get("formatted");
		if (StringUtils.isBlank(fullName)) {
			fullName = (String) map.get("displayName");
		}

		// hire/leave dates are staged in profileData by mapProfileData() and must not stay in it
		Long hireDate = (Long) profileData.remove(HIRE_DATE_KEY);
		Long leaveDate = (Long) profileData.remove(LEAVE_DATE_KEY);

		ContributorProfile profile = new ContributorProfile((String) profileData.get(ContentObjectFields.SYS_ID),
				fullName, primaryEmail, getEmails(emailsObject), typeSpecificCodes, hireDate, leaveDate);
		profile.setProfileData(profileData);
		return profile;
	}

	protected final String JIVE_PROFILE_NAME_KEY = "jive_label";
	protected final String JIVE_PROFILE_VALUE_KEY = "value";

	/**
	 * Build the searchisko content document for the profile from Jive data.
	 *
	 * @param map whole parsed Jive person structure
	 * @param jiveObject the nested "jive" structure of it
	 * @return profile data document. {@link #HIRE_DATE_KEY}/{@link #LEAVE_DATE_KEY} entries (if present) are
	 *         staged here for the caller to remove.
	 */
	@SuppressWarnings("unchecked")
	protected Map<String, Object> mapProfileData(Map<String, Object> map, Map<String, Object> jiveObject) {
		Map<String, Object> profileData = new LinkedHashMap<>();

		Object username = jiveObject.get("username");

		profileData.put(ContentObjectFields.SYS_CONTENT_PROVIDER, "jbossorg");
		profileData.put(ContentObjectFields.SYS_TYPE, "contributor_profile");
		profileData.put(ContentObjectFields.SYS_CONTENT_TYPE, "jbossorg_contributor_profile");
		profileData.put(ContentObjectFields.SYS_CONTENT_ID, username);
		profileData.put("id", "jbossorg_contributor_profile-" + username);
		profileData.put(ContentObjectFields.SYS_ID, "jbossorg_contributor_profile-" + username);
		profileData.put("name", map.get("name"));
		profileData.put("displayName", map.get("displayName"));
		profileData.put(ContentObjectFields.SYS_TITLE, map.get("displayName"));
		profileData.put(ContentObjectFields.TAGS, map.get("tags"));
		profileData.put(ContentObjectFields.SYS_TAGS, map.get("tags"));
		profileData.put("published", map.get("published"));
		profileData.put(ContentObjectFields.SYS_CREATED, map.get("published"));
		profileData.put("updated", map.get("updated"));
		// NOTE(review): SYS_UPDATED is intentionally "now" (time of this mapping), not Jive's "updated" value - confirm
		profileData.put(ContentObjectFields.SYS_UPDATED, new Date());

		if (map.get("addresses") != null) {
			List<Map<String, Object>> addressesList = (List<Map<String, Object>>) map.get("addresses");
			String country = null;
			for (Map<String, Object> address : addressesList) {
				Map<String, Object> addressFields = (Map<String, Object>) address.get(JIVE_PROFILE_VALUE_KEY);
				if (addressFields == null)
					continue;
				if (addressFields.containsKey("country") && addressFields.get("country").toString().trim().length() > 0) {
					country = addressFields.get("country").toString();
					// We prefer home address over other so if we found country in it we can finish searching.
					// BUG FIX: the original condition was inverted (it stopped on any address which was NOT the home
					// address), which prevented a later "Home Address" entry from taking precedence. Also avoids NPE
					// when the address has no label.
					if ("Home Address".equals(address.get(JIVE_PROFILE_NAME_KEY))) {
						break;
					}
				}
			}
			if (country != null && countryNameToCode.containsKey(country)) {
				profileData.put("country", countryNameToCode.get(country));
			}
		}

		Map<String, Object> resourcesObject = (Map<String, Object>) map.get("resources");
		String profileUrl = null;
		try {
			profileUrl = ((Map<String, Object>) resourcesObject.get("html")).get("ref").toString();
			profileData.put("profileUrl", profileUrl);
			profileData.put(ContentObjectFields.SYS_URL_VIEW, profileUrl);
		} catch (Exception e) {
			// broad catch is deliberate - any missing intermediate structure simply means no URL is available
			log.log(Level.WARNING, "Cannot get profile URL for username: {0}", username);
		}

		profileData.put("timeZone", jiveObject.get("timeZone"));
		profileData.put("thumbnailUrl", map.get("thumbnailUrl"));

		Map<String, Map<String, Object>> accounts = new LinkedHashMap<>();

		List<Map<String, Object>> jiveProfile = (List<Map<String, Object>>) jiveObject.get("profile");
		if (jiveProfile != null) {
			for (Map<String, Object> p : jiveProfile) {
				String profileNameKey = (String) p.get(JIVE_PROFILE_NAME_KEY);
				if (profileNameKey == null) {
					// defensive: switch on null would throw NPE for malformed profile entries
					continue;
				}
				switch (profileNameKey) {
				case "Biography":
					profileData.put("aboutMe", p.get(JIVE_PROFILE_VALUE_KEY));
					profileData.put("sys_description", p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Twitter Username":
					storeAccountInfo(accounts, DOMAIN_TWITTER_COM, DCP_PROFILE_ACCOUNT_USERNAME, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Twitter URL":
					storeAccountInfo(accounts, DOMAIN_TWITTER_COM, DCP_PROFILE_ACCOUNT_LINK, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "github Username":
					storeAccountInfo(accounts, DOMAIN_GITHUB_COM, DCP_PROFILE_ACCOUNT_USERNAME, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Github Profile":
					storeAccountInfo(accounts, DOMAIN_GITHUB_COM, DCP_PROFILE_ACCOUNT_LINK, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Facebook Username":
					storeAccountInfo(accounts, DOMAIN_FACEBOOK_COM, DCP_PROFILE_ACCOUNT_USERNAME, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Facebook Profile":
					storeAccountInfo(accounts, DOMAIN_FACEBOOK_COM, DCP_PROFILE_ACCOUNT_LINK, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "LinkedIn Username":
					storeAccountInfo(accounts, DOMAIN_LINKEDIN_COM, DCP_PROFILE_ACCOUNT_USERNAME, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "LinkedIn Profile":
					storeAccountInfo(accounts, DOMAIN_LINKEDIN_COM, DCP_PROFILE_ACCOUNT_LINK, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Google Profile":
					storeAccountInfo(accounts, DOMAIN_GOOGLE_COM, DCP_PROFILE_ACCOUNT_LINK, p.get(JIVE_PROFILE_VALUE_KEY));
					break;
				case "Company name":
					profileData.put("company", p.get(JIVE_PROFILE_VALUE_KEY).toString());
					break;
				case HIRE_DATE_KEY:
				case LEAVE_DATE_KEY:
					String rawValue = (String) p.get(JIVE_PROFILE_VALUE_KEY);
					SimpleDateFormat sdf = new SimpleDateFormat(DATE_PATTERN);
					sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
					sdf.setLenient(false);
					try {
						profileData.put(profileNameKey, sdf.parse(rawValue).getTime());
					} catch (ParseException e) {
						// nothing to set - unparseable date is simply ignored
					}
					break;
				}
			}
		} else {
			log.log(Level.WARNING, "Missing ''profile'' part of data for username: {0}", username);
		}

		storeAccountInfo(accounts, DOMAIN_JBOSS_ORG, DCP_PROFILE_ACCOUNT_USERNAME, username);
		storeAccountInfo(accounts, DOMAIN_JBOSS_ORG, DCP_PROFILE_ACCOUNT_LINK, profileUrl);

		if (!accounts.isEmpty()) {
			profileData.put(DCP_PROFILE_ACCOUNTS, new ArrayList<>(accounts.values()));
		}
		return profileData;
	}

	/**
	 * Store one piece of social account info into the per-domain accounts structure. Blank/null values are ignored.
	 */
	protected void storeAccountInfo(Map<String, Map<String, Object>> accounts, String domainName, String infoKey,
			Object infoValue) {
		if (infoValue == null)
			return;
		String valueString = infoValue.toString();
		if (SearchUtils.isBlank(valueString))
			return;
		Map<String, Object> a = accounts.get(domainName);
		if (a == null) {
			a = new HashMap<>(3);
			a.put(DCP_PROFILE_ACCOUNT_DOMAIN, domainName);
			accounts.put(domainName, a);
		}
		a.put(infoKey, valueString);
	}

	/**
	 * Safe getter for <code>jive.profile</code> field value.
	 *
	 * @param jiveObject to get profile value from
	 * @param jiveLabel <code>jive_label</code> for profile field value we can obtain
	 * @return profile field value or null
	 */
	@SuppressWarnings("unchecked")
	protected String getProfileValue(Map<String, Object> jiveObject, String jiveLabel) {
		if (jiveObject == null)
			return null;
		try {
			List<Map<String, Object>> profileObject = (List<Map<String, Object>>) jiveObject.get("profile");
			if (profileObject != null) {
				for (Map<String, Object> profileItem : profileObject) {
					if (jiveLabel.equals(profileItem.get(JIVE_PROFILE_NAME_KEY))) {
						return (String) profileItem.get(JIVE_PROFILE_VALUE_KEY);
					}
				}
			}
		} catch (ClassCastException e) {
			log.warning("bad structure of jive profile data");
		}
		return null;
	}

	/**
	 * Get list of email addresses from JIVE profile <code>emails</code> structure.
	 *
	 * @param emailsObject JIVE profile <code>emails</code> structure
	 * @return list of distinct emails. never null.
	 */
	protected List<String> getEmails(List<Map<String, Object>> emailsObject) {
		List<String> ret = new ArrayList<>();
		if (emailsObject != null) {
			for (Map<String, Object> emailObject : emailsObject) {
				String email = SearchUtils.trimToNull((String) emailObject.get("value"));
				if (email != null && !ret.contains(email)) {
					ret.add(email);
				}
			}
		}
		return ret;
	}

	/**
	 * @param typeSpecificCodes structure to add code into
	 * @param fieldTcsName name of Type Specific Code to add
	 * @param value of code. May be null or empty - ignored in this case.
	 */
	protected void addTypeSpecificCode(Map<String, List<String>> typeSpecificCodes, String fieldTcsName, String value) {
		value = SearchUtils.trimToNull(value);
		if (value != null) {
			List<String> vl = typeSpecificCodes.get(fieldTcsName);
			if (vl == null) {
				vl = new ArrayList<>();
				typeSpecificCodes.put(fieldTcsName, vl);
			}
			if (!vl.contains(value))
				vl.add(value);
		}
	}

	/**
	 * Get primary email address from JIVE profile <code>emails</code> structure.
	 *
	 * @param emailsObject JIVE profile <code>emails</code> structure.
	 * @return primary email address or null if not found
	 */
	protected String getPrimaryEmail(List<Map<String, Object>> emailsObject) {
		if (emailsObject == null) {
			log.log(Level.FINE, "Emails not returned in response from Jive. Probably bad authentication.");
			return null;
		}
		for (Map<String, Object> emailObject : emailsObject) {
			String email = (String) emailObject.get("value");
			// NOTE(review): assumes every email entry carries a "primary" Boolean; a missing key would NPE on unboxing
			if ((boolean) emailObject.get("primary")) {
				return email;
			}
		}
		return null;
	}

}
/*
 * Copyright 2015-2021 Micro Focus or one of its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hpe.caf.api;

import com.hpe.caf.naming.Name;
import com.hpe.caf.naming.ServicePath;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.text.StrLookup;
import org.apache.commons.lang3.text.StrSubstitutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Iterator;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

/**
 * Partial implementation of a ManagedConfigurationSource that performs hierarchical lookups based upon the service's
 * ServicePath, and recursive lookup for configuration objects that themselves have configuration marked with the
 * @Configuration annotation.
 */
public abstract class CafConfigurationSource implements ManagedConfigurationSource
{
    // cipher used to decrypt any @Encrypted String fields
    private final Cipher security;
    // this service's path; configuration is looked up at each level from most to least specific
    private final ServicePath id;
    // deserialises the raw configuration stream into the requested class
    private final Decoder decoder;
    // when true, ${token} placeholders in String fields are substituted from system properties / environment
    private final boolean isSubstitutorEnabled;
    private final Validator validator = Validation.buildDefaultValidatorFactory().getValidator();
    // metrics: total configuration requests and errors served by this source
    private final AtomicInteger confRequests = new AtomicInteger(0);
    private final AtomicInteger confErrors = new AtomicInteger(0);
    private static final Logger LOG = LoggerFactory.getLogger(CafConfigurationSource.class);

    /**
     * Each ConfigurationProvider itself takes some initial source of configuration which it may or may not use to initialise itself. The
     * initial "bootstrap" configuration comes from the worker core itself.
     *
     * @param bootstrapProvider the initial provider of configuration
     * @param cipher for decrypting information in a configuration file
     * @param servicePath to localise configuration for this service
     * @param decoder provides a mechanism to decode the configuration format
     */
    public CafConfigurationSource(final BootstrapConfiguration bootstrapProvider, final Cipher cipher, final ServicePath servicePath,
                                  final Decoder decoder)
    {
        this.security = Objects.requireNonNull(cipher);
        this.id = Objects.requireNonNull(servicePath);
        this.decoder = Objects.requireNonNull(decoder);
        Objects.requireNonNull(bootstrapProvider);
        this.isSubstitutorEnabled = getIsSubstitutorEnabled(bootstrapProvider);
    }

    /**
     * Acquire a configuration class from the provider. The requested class will be a simple Java object that when returned, can be
     * interacted with using getters and other standard mechanisms. Configuration classes may themselves contain other configuration
     * objects, which will be recursively acquired if marked @Configuration. Any fields marked @Encrypted will be decrypted, and any
     * validation annotations will be processed.
     *
     * @param configClass the class that represents your configuration
     * @param <T> the class that represents your configuration
     * @return the configuration class requested, if it can be deserialised
     * @throws ConfigurationException if the configuration class cannot be acquired or deserialised
     */
    @Override
    public final <T> T getConfiguration(final Class<T> configClass)
        throws ConfigurationException
    {
        Objects.requireNonNull(configClass);
        incrementRequests();
        T config = getCompleteConfig(configClass);
        // the fully resolved object must satisfy its javax.validation constraints before being handed out
        Set<ConstraintViolation<T>> violations = getValidator().validate(config);
        if (violations.isEmpty()) {
            return config;
        } else {
            incrementErrors();
            LOG.error("Configuration constraint violations found for {}: {}", configClass.getSimpleName(), violations);
            throw new ConfigurationException("Configuration validation failed for " + configClass.getSimpleName());
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final int getConfigurationRequests()
    {
        return confRequests.get();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public final int getConfigurationErrors()
    {
        return confErrors.get();
    }

    protected Cipher getCipher()
    {
        return this.security;
    }

    protected ServicePath getServicePath()
    {
        return this.id;
    }

    protected Validator getValidator()
    {
        return this.validator;
    }

    /**
     * Acquire and return a stream of the serialised data from the transport source.
     *
     * @param configClass the configuration class to be acquired
     * @param relativePath the partial service path that defines the scope to try and acquire the configuration in
     * @return the stream containing the serailised configuration of the class
     * @throws ConfigurationException if the stream cannot be acquired
     */
    // NOTE(review): raw Class in this abstract signature - changing it to Class<?> could affect subclasses, left as is
    protected abstract InputStream getConfigurationStream(final Class configClass, final Name relativePath)
        throws ConfigurationException;

    /**
     * This is the recursive entry point for acquiring a complete configuration class to return. Attempt to acquire a deserialised object
     * representing the configuration class requested, and analyse it for declared fields marked @Configuration. If any are found, the
     * method recursively calls itself until all configuration is satisfied.
     *
     * @param configClass the class representing configuration to acquire
     * @param <T> the class representing configuration to acquire
     * @return the completed (at this level) configuration
     * @throws ConfigurationException if configuration cannot be acquired
     */
    private <T> T getCompleteConfig(final Class<T> configClass)
        throws ConfigurationException
    {
        T config = getConfig(configClass);
        for (final Field f : configClass.getDeclaredFields()) {
            if (f.isAnnotationPresent(Configuration.class)) {
                // nested configuration object: resolve it recursively and inject via its bean setter
                try {
                    Method setter = getMethod(f.getName(), configClass, PropertyDescriptor::getWriteMethod);
                    if (setter != null) {
                        setter.invoke(config, getCompleteConfig(f.getType()));
                    }
                } catch (final ConfigurationException e) {
                    // missing nested configuration is not fatal - the deserialised default is kept
                    LOG.debug("Didn't find any overriding configuration", e);
                } catch (final InvocationTargetException | IllegalAccessException e) {
                    incrementErrors();
                    throw new ConfigurationException("Failed to get complete configuration for " + configClass.getSimpleName(), e);
                }
            } else if (f.getType().equals(String.class) && f.isAnnotationPresent(Encrypted.class)) {
                // encrypted String field: (optionally) substitute tokens, then decrypt in place via bean accessors
                try {
                    Method getter = getMethod(f.getName(), config.getClass(), PropertyDescriptor::getReadMethod);
                    Method setter = getMethod(f.getName(), config.getClass(), PropertyDescriptor::getWriteMethod);
                    if (getter != null && setter != null) {
                        final String configValue = (String) getter.invoke(config);
                        final String encryptedValue = isSubstitutorEnabled ? tokenSubstitutor(configValue) : configValue;
                        setter.invoke(config, getCipher().decrypt(encryptedValue));
                    }
                } catch (final CipherException | InvocationTargetException | IllegalAccessException e) {
                    throw new ConfigurationException("Failed to decrypt class fields", e);
                }
            } else if (isSubstitutorEnabled && f.getType().equals(String.class)) {
                // plain String field: substitute any ${token} placeholders when the substitutor is enabled
                try {
                    String propertyName = f.getName();
                    Method getter = getMethod(propertyName, config.getClass(), PropertyDescriptor::getReadMethod);
                    Method setter = getMethod(propertyName, config.getClass(), PropertyDescriptor::getWriteMethod);
                    if (getter != null && setter != null) {
                        // Property value may contain tokens that require substitution.
                        String propertyValueByToken = tokenSubstitutor((String) getter.invoke(config));
                        setter.invoke(config, propertyValueByToken);
                    }
                } catch (final InvocationTargetException | IllegalAccessException e) {
                    throw new ConfigurationException("Failed to get complete configuration for " + configClass.getSimpleName(), e);
                }
            }
        }
        return config;
    }

    /**
     * Acquire, decode and decrypt a configuration object from a data stream.
     *
     * @param configClass the class representing configuration to acquire
     * @param <T> the class representing configuration to acquire
     * @return the decoded configuration object
     * @throws ConfigurationException if the configuration cannot be acquired
     */
    private <T> T getConfig(final Class<T> configClass)
        throws ConfigurationException
    {
        // walk the service path from most specific to least specific scope; first successful decode wins
        Iterator<Name> it = getServicePath().descendingPathIterator();
        while (it.hasNext()) {
            try (InputStream in = getConfigurationStream(configClass, it.next())) {
                return decoder.deserialise(in, configClass);
            } catch (final ConfigurationException e) {
                // not present at this level - try the next (less specific) path level
                LOG.trace("No configuration at this path level", e);
            } catch (final CodecException | IOException e) {
                incrementErrors();
                throw new ConfigurationException("Failed to get configuration for " + configClass.getSimpleName(), e);
            }
        }
        incrementErrors();
        throw new ConfigurationException("No configuration found for " + configClass.getSimpleName());
    }

    /**
     * Checks whether the string substitution functionality should be enabled.
     *
     * @param bootstrapConfig used to provide basic, initial startup configuration
     * @return true if the configuration substitution should be performed
     */
    private static boolean getIsSubstitutorEnabled(final BootstrapConfiguration bootstrapConfig)
    {
        final String ENABLE_SUBSTITUTOR_CONFIG_KEY = "CAF_CONFIG_ENABLE_SUBSTITUTOR";
        final boolean ENABLE_SUBSTITUTOR_CONFIG_DEFAULT = true;

        // Return the default if the setting is not configured
        if (!bootstrapConfig.isConfigurationPresent(ENABLE_SUBSTITUTOR_CONFIG_KEY)) {
            return ENABLE_SUBSTITUTOR_CONFIG_DEFAULT;
        }

        // Return the configured setting.
        // The ConfigurationException should never happen since isConfigurationPresent() has already been called.
        try {
            return bootstrapConfig.getConfigurationBoolean(ENABLE_SUBSTITUTOR_CONFIG_KEY);
        } catch (final ConfigurationException ex) {
            throw new RuntimeException(ex);
        }
    }

    /**
     * Replaces ${token} placeholders in the source string, looking values up first in system properties and then in
     * environment variables.
     */
    private static String tokenSubstitutor(final String source)
    {
        final StrSubstitutor strSubstitutor = new StrSubstitutor(
            new StrLookup<Object>()
        {
            @Override
            public String lookup(final String key)
            {
                // system property takes precedence over environment variable
                return (System.getProperty(key) != null) ? System.getProperty(key) : System.getenv(key);
            }
        });
        return strSubstitutor.replace(source);
    }

    /**
     * Increase the number of configuration requests recorded.
     */
    protected void incrementRequests()
    {
        this.confRequests.incrementAndGet();
    }

    /**
     * Increase the number of configuration errors recorded.
     */
    protected void incrementErrors()
    {
        this.confErrors.incrementAndGet();
    }

    // Resolves the bean read or write method (selected by the supplied function) for the named property,
    // or null when no such accessor can be introspected.
    private Method getMethod(
        final String propertyName,
        final Class<?> beanClass,
        final Function<PropertyDescriptor, Method> function
    )
    {
        try {
            PropertyDescriptor propertyDescriptor = new PropertyDescriptor(propertyName, beanClass);
            return function.apply(propertyDescriptor);
        } catch (final IntrospectionException e) {
            LOG.debug(String.format("Unable to "
                + "create Property Descriptor from field %s :", propertyName)
                + System.lineSeparator() + ExceptionUtils.getStackTrace(e));
            return null;
        }
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.application.options;

import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.ide.DataManager;
import com.intellij.ide.PowerSaveMode;
import com.intellij.ide.ui.UISettings;
import com.intellij.lang.LangBundle;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtilRt;
import com.intellij.ui.IdeUICustomization;
import com.intellij.ui.JBColor;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.ui.components.JBLabel;
import com.intellij.ui.components.JBRadioButton;
import com.intellij.uiDesigner.core.GridConstraints;
import com.intellij.uiDesigner.core.GridLayoutManager;
import org.intellij.lang.annotations.MagicConstant;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.event.ChangeListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.List;

/**
 * Settings panel backing the "Code Completion" preferences page.
 *
 * <p>The UI components are bound from a GUI Designer form (hence the
 * unassigned fields). {@link #reset()} loads the current values from
 * {@link CodeInsightSettings} and {@link UISettings}, {@link #apply()}
 * writes the UI state back, and {@link #isModified()} reports whether
 * the two differ.
 */
public class CodeCompletionPanel {
  JPanel myPanel;
  private JCheckBox myCbAutocompletion;
  private JCheckBox myCbAutopopupJavaDoc;
  private JTextField myAutopopupJavaDocField;
  private JLabel myAutoInsertLabel;
  private JCheckBox myCbOnCodeCompletion;
  private JCheckBox myCbOnSmartTypeCompletion;
  private JCheckBox myCbParameterInfoPopup;
  private JTextField myParameterInfoDelayField;
  private JCheckBox myCbShowFullParameterSignatures;
  private JCheckBox myCbSorting;
  private JBCheckBox myCbSelectByChars;
  private JCheckBox myCbCompleteFunctionWithParameters;
  private JBCheckBox myCbMatchCase;
  private JBRadioButton myFirstLetterOnly;
  private JBRadioButton myAllLetters;
  private JBLabel myBasicShortcut;
  private JBLabel mySmartShortcut;
  private JPanel myCbOnCodeCompletionPanel;
  private JPanel myCbOnSmartTypeCompletionPanel;
  private JPanel myAddonPanelAfter;
  private JPanel myAddonPanelBefore;

  /**
   * Wires up listeners, shortcut hints and optional add-on components,
   * then loads the current settings via {@link #reset()}.
   *
   * @param optionAddons  extra components inserted before the standard options
   * @param sectionAddons extra components appended after the standard options
   */
  public CodeCompletionPanel(List<? extends JComponent> optionAddons, List<? extends JComponent> sectionAddons) {
    // The case-sensitivity radio buttons only make sense while "Match case" is checked.
    ChangeListener updateCaseCheckboxes = __ -> {
      myFirstLetterOnly.setEnabled(myCbMatchCase.isSelected());
      myAllLetters.setEnabled(myCbMatchCase.isSelected());
    };
    myCbMatchCase.addChangeListener(updateCaseCheckboxes);
    updateCaseCheckboxes.stateChanged(null);

    // Show the keyboard shortcuts of the two completion actions as grey hints.
    ActionManager actionManager = ActionManager.getInstance();
    myBasicShortcut.setText(KeymapUtil.getFirstKeyboardShortcutText(actionManager.getAction(IdeActions.ACTION_CODE_COMPLETION)));
    mySmartShortcut.setText(KeymapUtil.getFirstKeyboardShortcutText(actionManager.getAction(IdeActions.ACTION_SMART_TYPE_COMPLETION)));
    myBasicShortcut.setForeground(JBColor.GRAY);
    mySmartShortcut.setForeground(JBColor.GRAY);

    myCbSelectByChars.setText(IdeUICustomization.getInstance().getSelectAutopopupByCharsText());

    // "Select by chars" is only meaningful while auto-popup completion is on.
    myCbAutocompletion.addActionListener(
      new ActionListener() {
        @Override
        public void actionPerformed(@NotNull ActionEvent event) {
          boolean selected = myCbAutocompletion.isSelected();
          myCbSelectByChars.setEnabled(selected);
        }
      }
    );

    // The javadoc delay field is only editable while its checkbox is selected.
    myCbAutopopupJavaDoc.addActionListener(
      new ActionListener() {
        @Override
        public void actionPerformed(@NotNull ActionEvent event) {
          myAutopopupJavaDocField.setEnabled(myCbAutopopupJavaDoc.isSelected());
        }
      }
    );

    // Same pattern for the parameter-info delay field.
    myCbParameterInfoPopup.addActionListener(
      new ActionListener() {
        @Override
        public void actionPerformed(@NotNull ActionEvent event) {
          myParameterInfoDelayField.setEnabled(myCbParameterInfoPopup.isSelected());
        }
      }
    );

    // Hide options that are not applicable in the current IDE configuration.
    hideOption(myCbOnSmartTypeCompletionPanel, OptionId.COMPLETION_SMART_TYPE);
    hideOption(myCbOnCodeCompletionPanel, OptionId.AUTOCOMPLETE_ON_BASIC_CODE_COMPLETION);
    hideOption(myCbCompleteFunctionWithParameters, OptionId.SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION);
    // The "auto-insert" section label is pointless if both of its options are hidden.
    if (!myCbOnSmartTypeCompletionPanel.isVisible() && !myCbOnCodeCompletionPanel.isVisible()) {
      myAutoInsertLabel.setVisible(false);
    }

    addExtensions(optionAddons, myAddonPanelBefore);
    addExtensions(sectionAddons, myAddonPanelAfter);
    reset();
  }

  /**
   * Lays out the given custom components vertically inside {@code addonPanel},
   * or hides the panel entirely when there are none.
   */
  private static void addExtensions(@NotNull List<? extends JComponent> customComponents,
                                    @NotNull JPanel addonPanel) {
    if (customComponents.isEmpty()) {
      addonPanel.setVisible(false);
      return;
    }
    final GridLayoutManager manager = new GridLayoutManager(customComponents.size(), 1);
    addonPanel.setLayout(manager);
    final GridConstraints gc = new GridConstraints();
    gc.setUseParentLayout(true);
    gc.setFill(GridConstraints.FILL_BOTH);
    gc.setVSizePolicy(GridConstraints.SIZEPOLICY_CAN_SHRINK);
    for (int i = 0; i < customComponents.size(); i++) {
      JComponent c = customComponents.get(i);
      gc.setRow(i);
      addonPanel.add(c, gc);
    }
  }

  /** Shows {@code component} only when the given option is applicable. */
  private static void hideOption(JComponent component, OptionId id) {
    component.setVisible(OptionsApplicabilityFilter.isApplicable(id));
  }

  /** Loads the current settings values into the UI components. */
  public void reset() {
    CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();

    switch (codeInsightSettings.COMPLETION_CASE_SENSITIVE) {
      case CodeInsightSettings.ALL:
        myCbMatchCase.setSelected(true);
        myAllLetters.setSelected(true);
        break;
      case CodeInsightSettings.NONE:
        myCbMatchCase.setSelected(false);
        break;
      default: // CodeInsightSettings.FIRST_LETTER
        myCbMatchCase.setSelected(true);
        myFirstLetterOnly.setSelected(true);
        break;
    }

    myCbSelectByChars.setSelected(codeInsightSettings.isSelectAutopopupSuggestionsByChars());
    myCbOnCodeCompletion.setSelected(codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION);
    myCbOnSmartTypeCompletion.setSelected(codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION);
    myCbAutocompletion.setSelected(codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP);

    myCbAutopopupJavaDoc.setSelected(codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
    myAutopopupJavaDocField.setEnabled(codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
    myAutopopupJavaDocField.setText(String.valueOf(codeInsightSettings.JAVADOC_INFO_DELAY));

    myCbParameterInfoPopup.setSelected(codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
    myParameterInfoDelayField.setEnabled(codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
    myParameterInfoDelayField.setText(String.valueOf(codeInsightSettings.PARAMETER_INFO_DELAY));

    myCbShowFullParameterSignatures.setSelected(codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO);
    myCbCompleteFunctionWithParameters.setSelected(codeInsightSettings.SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION);
    // NOTE(review): a redundant second assignment of AUTO_POPUP_COMPLETION_LOOKUP to
    // myCbAutocompletion.setSelected(...) was removed here; the value is already applied above.
    myCbSorting.setSelected(UISettings.getInstance().getSortLookupElementsLexicographically());

    myCbAutocompletion.setText(ApplicationBundle.message("editbox.auto.complete") +
                               (PowerSaveMode.isEnabled() ? LangBundle.message("label.not.available.in.power.save.mode") : ""));
  }

  /** Writes the UI state back into the settings and notifies the daemon. */
  public void apply() {
    CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();

    codeInsightSettings.COMPLETION_CASE_SENSITIVE = getCaseSensitiveValue();
    codeInsightSettings.setSelectAutopopupSuggestionsByChars(myCbSelectByChars.isSelected());

    codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION = myCbOnCodeCompletion.isSelected();
    codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION = myCbOnSmartTypeCompletion.isSelected();
    codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO = myCbShowFullParameterSignatures.isSelected();
    codeInsightSettings.AUTO_POPUP_PARAMETER_INFO = myCbParameterInfoPopup.isSelected();
    codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP = myCbAutocompletion.isSelected();
    codeInsightSettings.AUTO_POPUP_JAVADOC_INFO = myCbAutopopupJavaDoc.isSelected();

    codeInsightSettings.PARAMETER_INFO_DELAY = getIntegerValue(myParameterInfoDelayField.getText());
    codeInsightSettings.JAVADOC_INFO_DELAY = getIntegerValue(myAutopopupJavaDocField.getText());
    codeInsightSettings.SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION = myCbCompleteFunctionWithParameters.isSelected();

    UISettings.getInstance().setSortLookupElementsLexicographically(myCbSorting.isSelected());

    // Re-run highlighting so that changed completion settings take effect immediately.
    final Project project = CommonDataKeys.PROJECT.getData(DataManager.getInstance().getDataContext(myPanel));
    if (project != null) {
      DaemonCodeAnalyzer.getInstance(project).settingsChanged();
    }
  }

  /** @return true when any UI component differs from the stored settings */
  public boolean isModified() {
    CodeInsightSettings codeInsightSettings = CodeInsightSettings.getInstance();
    boolean isModified = false;

    //noinspection ConstantConditions
    isModified |= getCaseSensitiveValue() != codeInsightSettings.COMPLETION_CASE_SENSITIVE;
    isModified |= isModified(myCbOnCodeCompletion, codeInsightSettings.AUTOCOMPLETE_ON_CODE_COMPLETION);
    isModified |= isModified(myCbSelectByChars, codeInsightSettings.isSelectAutopopupSuggestionsByChars());
    isModified |= isModified(myCbOnSmartTypeCompletion, codeInsightSettings.AUTOCOMPLETE_ON_SMART_TYPE_COMPLETION);
    isModified |= isModified(myCbShowFullParameterSignatures, codeInsightSettings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO);
    isModified |= isModified(myCbParameterInfoPopup, codeInsightSettings.AUTO_POPUP_PARAMETER_INFO);
    isModified |= isModified(myCbAutocompletion, codeInsightSettings.AUTO_POPUP_COMPLETION_LOOKUP);
    isModified |= isModified(myCbCompleteFunctionWithParameters, codeInsightSettings.SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION);
    isModified |= isModified(myCbAutopopupJavaDoc, codeInsightSettings.AUTO_POPUP_JAVADOC_INFO);
    isModified |= isModified(myParameterInfoDelayField, codeInsightSettings.PARAMETER_INFO_DELAY);
    isModified |= isModified(myAutopopupJavaDocField, codeInsightSettings.JAVADOC_INFO_DELAY);
    isModified |= isModified(myCbSorting, UISettings.getInstance().getSortLookupElementsLexicographically());

    return isModified;
  }

  private static boolean isModified(JCheckBox checkBox, boolean value) {
    return checkBox.isSelected() != value;
  }

  private static boolean isModified(JTextField textField, int value) {
    return getIntegerValue(textField.getText()) != value;
  }

  /** Parses {@code s} as a non-negative int, treating garbage or negatives as 0. */
  private static int getIntegerValue(String s) {
    int value = StringUtilRt.parseInt(s, 0);
    return Math.max(value, 0);
  }

  /** Maps the match-case checkbox/radio state to a COMPLETION_CASE_SENSITIVE constant. */
  @MagicConstant(intValues = {CodeInsightSettings.ALL, CodeInsightSettings.NONE, CodeInsightSettings.FIRST_LETTER})
  private int getCaseSensitiveValue() {
    if (!myCbMatchCase.isSelected()) return CodeInsightSettings.NONE;
    return myAllLetters.isSelected() ? CodeInsightSettings.ALL : CodeInsightSettings.FIRST_LETTER;
  }
}
/**
 * Copyright (c) 2013-2015, jcabi.com
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met: 1) Redistributions of source code must retain the above
 * copyright notice, this list of conditions and the following
 * disclaimer. 2) Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following
 * disclaimer in the documentation and/or other materials provided
 * with the distribution. 3) Neither the name of the jcabi.com nor
 * the names of its contributors may be used to endorse or promote
 * products derived from this software without specific prior written
 * permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT
 * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.jcabi.github;

import com.google.common.base.Optional;
import com.jcabi.aspects.Immutable;
import com.jcabi.aspects.Loggable;
import java.io.IOException;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonObjectBuilder;
import javax.validation.constraints.NotNull;
import lombok.EqualsAndHashCode;
import lombok.ToString;

/**
 * Github statuses of a commit.
 *
 * <p>The status exposes all available properties through its
 * {@code json()} method. However, it is recommended to use its
 * "smart" decorator, which helps you to get access to all JSON properties,
 * for example:
 *
 * <pre> URL url = new Status.Smart(status).url();</pre>
 *
 * @author Marcin Cylke (marcin.cylke+github@gmail.com)
 * @version $Id$
 * @since 0.23
 * @see <a href="https://developer.github.com/v3/repos/statuses/">Repo statuses</a>
 */
@Immutable
@SuppressWarnings("PMD.TooManyMethods")
public interface Statuses extends JsonReadable {

    /**
     * Associated commit.
     * @return Commit
     */
    @NotNull(message = "commit is never NULL")
    Commit commit();

    /**
     * Create new status.
     * @param status Add this status
     * @throws java.io.IOException If there is any I/O problem
     * @return The added status
     * @see <a href="https://developer.github.com/v3/repos/statuses/#create-a-status">Create a Status</a>
     */
    @NotNull(message = "status is never NULL")
    Status create(
        @NotNull(message = "status creation data can't be NULL")
        final StatusCreate status
    ) throws IOException;

    /**
     * List all statuses for a given ref.
     * @param ref It can be a SHA, a branch name, or a tag name.
     * @return Iterable of statuses
     * @see <a href="https://developer.github.com/v3/repos/statuses/#list-statuses-for-a-specific-ref">List Statuses for a specific Ref</a>
     */
    @NotNull(message = "iterable of statuses is never NULL")
    Iterable<Status> list(
        @NotNull(message = "ref can't be NULL")
        final String ref
    );

    /**
     * Data to use when creating a new GitHub commit status.
     *
     * <p>Instances are immutable; the {@code with*} methods return a new
     * instance with one property replaced, builder-style.
     *
     * @author Chris Rebert (github@rebertia.com)
     * @version $Id$
     * @since 0.24
     * @see <a href="https://developer.github.com/v3/repos/statuses/#create-a-status">Create a Status</a>
     */
    @ToString
    @Loggable(Loggable.DEBUG)
    @EqualsAndHashCode(of = { "state", "description", "context", "targeturl" })
    final class StatusCreate implements JsonReadable {
        /**
         * State.
         */
        private final transient Status.State state;
        /**
         * Description.
         */
        private final transient String description;
        /**
         * Context string.
         */
        private final transient Optional<String> context;
        /**
         * Target URL.
         */
        private final transient Optional<String> targeturl;

        /**
         * Public ctor. Creates a status with the given state, an empty
         * description, and no context or target URL.
         * @param stat State
         */
        public StatusCreate(
            @NotNull(message = "state can't be NULL")
            final Status.State stat
        ) {
            this(
                stat,
                "",
                Optional.<String>absent(),
                Optional.<String>absent()
            );
        }

        /**
         * Private ctor.
         * @param stat State
         * @param desc Description
         * @param cntxt Context
         * @param target Target URL
         * @checkstyle ParameterNumberCheck (10 lines)
         */
        private StatusCreate(
            @NotNull(message = "state can't be NULL")
            final Status.State stat,
            @NotNull(message = "description can't be NULL")
            final String desc,
            @NotNull(message = "context optional itself can't be NULL")
            final Optional<String> cntxt,
            @NotNull(message = "target URL optional itself can't be NULL")
            final Optional<String> target
        ) {
            this.state = stat;
            this.description = desc;
            this.context = cntxt;
            this.targeturl = target;
        }

        /**
         * Returns a StatusCreate with the given state.
         * @param stat State
         * @return StatusCreate
         */
        public StatusCreate withState(final Status.State stat) {
            return new StatusCreate(
                stat,
                this.description,
                this.context,
                this.targeturl
            );
        }

        /**
         * Returns a StatusCreate with the given description.
         * @param desc Description
         * @return StatusCreate
         */
        public StatusCreate withDescription(final String desc) {
            return new StatusCreate(
                this.state,
                desc,
                this.context,
                this.targeturl
            );
        }

        /**
         * Returns a StatusCreate with the given context.
         * @param cntxt Context
         * @return StatusCreate
         */
        public StatusCreate withContext(final Optional<String> cntxt) {
            return new StatusCreate(
                this.state,
                this.description,
                cntxt,
                this.targeturl
            );
        }

        /**
         * Returns a StatusCreate with the given target URL.
         * @param target Target URL
         * @return StatusCreate
         */
        public StatusCreate withTargetUrl(final Optional<String> target) {
            return new StatusCreate(
                this.state,
                this.description,
                this.context,
                target
            );
        }

        /**
         * Serializes to the JSON shape expected by the GitHub "create a
         * status" API; the optional "context" and "target_url" keys are
         * only emitted when present.
         */
        @Override
        @NotNull(message = "JSON is never NULL")
        public JsonObject json() {
            final JsonObjectBuilder builder = Json.createObjectBuilder()
                .add("state", this.state.identifier())
                .add("description", this.description);
            if (this.context.isPresent()) {
                builder.add("context", this.context.get());
            }
            if (this.targeturl.isPresent()) {
                builder.add("target_url", this.targeturl.get());
            }
            return builder.build();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.webapp;

import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.resourcemanager.RMServerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppAttemptInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppAttemptsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationStatisticsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FairSchedulerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodesInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.SchedulerTypeInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.StatisticsItemInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.BadRequestException;
import org.apache.hadoop.yarn.webapp.NotFoundException;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;

/**
 * JAX-RS resource exposing the ResourceManager REST API under
 * {@code /ws/v1/cluster}: cluster info/metrics, scheduler info, nodes,
 * applications, application attempts and application statistics.
 */
@Singleton
@Path("/ws/v1/cluster")
public class RMWebServices {
  private static final Log LOG = LogFactory.getLog(RMWebServices.class.getName());
  // Sentinel used to blank out HTTP addresses of dead/inactive nodes.
  private static final String EMPTY = "";
  // Wildcard bucket used by the app-statistics scoreboard for "any type".
  private static final String ANY = "*";
  private final ResourceManager rm;
  private static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
  private final Configuration conf;
  private @Context HttpServletResponse response;

  @Inject
  public RMWebServices(final ResourceManager rm, Configuration conf) {
    this.rm = rm;
    this.conf = conf;
  }

  /**
   * Whether the remote caller may view the given application: access is
   * granted for anonymous callers, for users with VIEW_APP on the app, or
   * for queue administrators.
   */
  protected Boolean hasAccess(RMApp app, HttpServletRequest hsr) {
    // Check for the authorization.
    String remoteUser = hsr.getRemoteUser();
    UserGroupInformation callerUGI = null;
    if (remoteUser != null) {
      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
    }
    if (callerUGI != null
        && !(this.rm.getApplicationACLsManager().checkAccess(callerUGI,
               ApplicationAccessType.VIEW_APP, app.getUser(),
               app.getApplicationId())
             || this.rm.getQueueACLsManager().checkAccess(callerUGI,
               QueueACL.ADMINISTER_QUEUE, app.getQueue()))) {
      return false;
    }
    return true;
  }

  /** Common per-request setup for every endpoint. */
  private void init() {
    //clear content type
    response.setContentType(null);
  }

  @GET
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public ClusterInfo get() {
    return getClusterInfo();
  }

  /** GET /info — general cluster information. */
  @GET
  @Path("/info")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public ClusterInfo getClusterInfo() {
    init();
    return new ClusterInfo(this.rm);
  }

  /** GET /metrics — aggregate cluster metrics. */
  @GET
  @Path("/metrics")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public ClusterMetricsInfo getClusterMetricsInfo() {
    init();
    return new ClusterMetricsInfo(this.rm, this.rm.getRMContext());
  }

  /**
   * GET /scheduler — scheduler information, specialised per configured
   * scheduler implementation (Capacity, Fair or FIFO).
   */
  @GET
  @Path("/scheduler")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public SchedulerTypeInfo getSchedulerInfo() {
    init();
    ResourceScheduler rs = rm.getResourceScheduler();
    SchedulerInfo sinfo;
    if (rs instanceof CapacityScheduler) {
      CapacityScheduler cs = (CapacityScheduler) rs;
      CSQueue root = cs.getRootQueue();
      sinfo = new CapacitySchedulerInfo(root);
    } else if (rs instanceof FairScheduler) {
      FairScheduler fs = (FairScheduler) rs;
      sinfo = new FairSchedulerInfo(fs);
    } else if (rs instanceof FifoScheduler) {
      sinfo = new FifoSchedulerInfo(this.rm);
    } else {
      throw new NotFoundException("Unknown scheduler configured");
    }
    return new SchedulerTypeInfo(sinfo);
  }

  /**
   * Returns all nodes in the cluster. If the states param is given, returns
   * all nodes that are in the comma-separated list of states.
   */
  @GET
  @Path("/nodes")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public NodesInfo getNodes(@QueryParam("states") String states) {
    init();
    ResourceScheduler sched = this.rm.getResourceScheduler();
    if (sched == null) {
      throw new NotFoundException("Null ResourceScheduler instance");
    }
    EnumSet<NodeState> acceptedStates;
    if (states == null) {
      acceptedStates = EnumSet.allOf(NodeState.class);
    } else {
      acceptedStates = EnumSet.noneOf(NodeState.class);
      // NOTE(review): NodeState.valueOf throws IllegalArgumentException for an
      // unknown state, which surfaces as a 500 rather than a 400 — confirm intent.
      for (String stateStr : states.split(",")) {
        acceptedStates.add(NodeState.valueOf(stateStr.toUpperCase()));
      }
    }
    Collection<RMNode> rmNodes =
        RMServerUtils.queryRMNodes(this.rm.getRMContext(), acceptedStates);
    NodesInfo nodesInfo = new NodesInfo();
    for (RMNode rmNode : rmNodes) {
      NodeInfo nodeInfo = new NodeInfo(rmNode, sched);
      // Nodes no longer in the cluster have no reachable HTTP address.
      if (EnumSet
          .of(NodeState.LOST, NodeState.DECOMMISSIONED, NodeState.REBOOTED)
          .contains(rmNode.getState())) {
        nodeInfo.setNodeHTTPAddress(EMPTY);
      }
      nodesInfo.add(nodeInfo);
    }
    return nodesInfo;
  }

  /** GET /nodes/{nodeId} — a single node, active or inactive. */
  @GET
  @Path("/nodes/{nodeId}")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public NodeInfo getNode(@PathParam("nodeId") String nodeId) {
    init();
    if (nodeId == null || nodeId.isEmpty()) {
      throw new NotFoundException("nodeId, " + nodeId + ", is empty or null");
    }
    ResourceScheduler sched = this.rm.getResourceScheduler();
    if (sched == null) {
      throw new NotFoundException("Null ResourceScheduler instance");
    }
    NodeId nid = ConverterUtils.toNodeId(nodeId);
    RMNode ni = this.rm.getRMContext().getActiveRMNodes().get(nid);
    boolean isInactive = false;
    if (ni == null) {
      // Fall back to the inactive-node table, which is keyed by host only.
      ni = this.rm.getRMContext().getInactiveRMNodes().get(nid.getHost());
      if (ni == null) {
        throw new NotFoundException("nodeId, " + nodeId + ", is not found");
      }
      isInactive = true;
    }
    NodeInfo nodeInfo = new NodeInfo(ni, sched);
    if (isInactive) {
      nodeInfo.setNodeHTTPAddress(EMPTY);
    }
    return nodeInfo;
  }

  /**
   * GET /apps — applications matching the given query parameters. Range
   * checks are performed up front; most filters are pushed down into the
   * GetApplicationsRequest, while finalStatus is filtered here.
   */
  @GET
  @Path("/apps")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public AppsInfo getApps(@Context HttpServletRequest hsr,
      @QueryParam("state") String stateQuery,
      @QueryParam("states") Set<String> statesQuery,
      @QueryParam("finalStatus") String finalStatusQuery,
      @QueryParam("user") String userQuery,
      @QueryParam("queue") String queueQuery,
      @QueryParam("limit") String count,
      @QueryParam("startedTimeBegin") String startedBegin,
      @QueryParam("startedTimeEnd") String startedEnd,
      @QueryParam("finishedTimeBegin") String finishBegin,
      @QueryParam("finishedTimeEnd") String finishEnd,
      @QueryParam("applicationTypes") Set<String> applicationTypes,
      @QueryParam("applicationTags") Set<String> applicationTags) {
    boolean checkCount = false;
    boolean checkStart = false;
    boolean checkEnd = false;
    boolean checkAppTypes = false;
    boolean checkAppStates = false;
    boolean checkAppTags = false;
    long countNum = 0;

    // set values suitable in case both of begin/end not specified
    long sBegin = 0;
    long sEnd = Long.MAX_VALUE;
    long fBegin = 0;
    long fEnd = Long.MAX_VALUE;

    init();
    if (count != null && !count.isEmpty()) {
      checkCount = true;
      countNum = Long.parseLong(count);
      if (countNum <= 0) {
        throw new BadRequestException("limit value must be greater then 0");
      }
    }

    if (startedBegin != null && !startedBegin.isEmpty()) {
      checkStart = true;
      sBegin = Long.parseLong(startedBegin);
      if (sBegin < 0) {
        throw new BadRequestException("startedTimeBegin must be greater than 0");
      }
    }
    if (startedEnd != null && !startedEnd.isEmpty()) {
      checkStart = true;
      sEnd = Long.parseLong(startedEnd);
      if (sEnd < 0) {
        throw new BadRequestException("startedTimeEnd must be greater than 0");
      }
    }
    if (sBegin > sEnd) {
      throw new BadRequestException(
          "startedTimeEnd must be greater than startTimeBegin");
    }

    if (finishBegin != null && !finishBegin.isEmpty()) {
      checkEnd = true;
      fBegin = Long.parseLong(finishBegin);
      if (fBegin < 0) {
        throw new BadRequestException("finishTimeBegin must be greater than 0");
      }
    }
    if (finishEnd != null && !finishEnd.isEmpty()) {
      checkEnd = true;
      fEnd = Long.parseLong(finishEnd);
      if (fEnd < 0) {
        throw new BadRequestException("finishTimeEnd must be greater than 0");
      }
    }
    if (fBegin > fEnd) {
      throw new BadRequestException(
          "finishTimeEnd must be greater than finishTimeBegin");
    }

    Set<String> appTypes = parseQueries(applicationTypes, false);
    if (!appTypes.isEmpty()) {
      checkAppTypes = true;
    }

    Set<String> appTags = parseQueries(applicationTags, false);
    if (!appTags.isEmpty()) {
      checkAppTags = true;
    }

    // stateQuery is deprecated; it is folded into the multi-valued "states" set.
    if (stateQuery != null && !stateQuery.isEmpty()) {
      statesQuery.add(stateQuery);
    }
    Set<String> appStates = parseQueries(statesQuery, true);
    if (!appStates.isEmpty()) {
      checkAppStates = true;
    }

    GetApplicationsRequest request = GetApplicationsRequest.newInstance();

    if (checkStart) {
      request.setStartRange(sBegin, sEnd);
    }

    if (checkEnd) {
      request.setFinishRange(fBegin, fEnd);
    }

    if (checkCount) {
      request.setLimit(countNum);
    }

    if (checkAppTypes) {
      request.setApplicationTypes(appTypes);
    }

    if (checkAppTags) {
      request.setApplicationTags(appTags);
    }

    if (checkAppStates) {
      request.setApplicationStates(appStates);
    }

    if (queueQuery != null && !queueQuery.isEmpty()) {
      ResourceScheduler rs = rm.getResourceScheduler();
      if (rs instanceof CapacityScheduler) {
        CapacityScheduler cs = (CapacityScheduler) rs;
        // validate queue exists
        try {
          cs.getQueueInfo(queueQuery, false, false);
        } catch (IOException e) {
          throw new BadRequestException(e.getMessage());
        }
      }
      Set<String> queues = new HashSet<String>(1);
      queues.add(queueQuery);
      request.setQueues(queues);
    }

    if (userQuery != null && !userQuery.isEmpty()) {
      Set<String> users = new HashSet<String>(1);
      users.add(userQuery);
      request.setUsers(users);
    }

    List<ApplicationReport> appReports = null;
    try {
      appReports = rm.getClientRMService()
          .getApplications(request, false).getApplicationList();
    } catch (YarnException e) {
      LOG.error("Unable to retrieve apps from ClientRMService", e);
      throw new YarnRuntimeException(
          "Unable to retrieve apps from ClientRMService", e);
    }

    final ConcurrentMap<ApplicationId, RMApp> apps =
        rm.getRMContext().getRMApps();
    AppsInfo allApps = new AppsInfo();
    for (ApplicationReport report : appReports) {
      // NOTE(review): rmapp can be null if the app was removed from RMContext
      // between the RPC and this lookup, which would NPE below — confirm.
      RMApp rmapp = apps.get(report.getApplicationId());

      if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
        // valueOf is used purely for validation of the query value; an unknown
        // value raises IllegalArgumentException (surfaces as a 500, not 400).
        FinalApplicationStatus.valueOf(finalStatusQuery);
        if (!rmapp.getFinalApplicationStatus().toString()
            .equalsIgnoreCase(finalStatusQuery)) {
          continue;
        }
      }

      AppInfo app = new AppInfo(rmapp, hasAccess(rmapp, hsr),
          WebAppUtils.getHttpSchemePrefix(conf));
      allApps.add(app);
    }
    return allApps;
  }

  /**
   * GET /appstatistics — per-(state, type) application counts. Currently at
   * most one applicationType (or the "*" wildcard) is supported.
   */
  @GET
  @Path("/appstatistics")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public ApplicationStatisticsInfo getAppStatistics(
      @Context HttpServletRequest hsr,
      @QueryParam("states") Set<String> stateQueries,
      @QueryParam("applicationTypes") Set<String> typeQueries) {
    init();

    // parse the params and build the scoreboard
    // converting state/type name to lowercase
    Set<String> states = parseQueries(stateQueries, true);
    Set<String> types = parseQueries(typeQueries, false);
    // if no types, counts the applications of any types
    if (types.size() == 0) {
      types.add(ANY);
    } else if (types.size() != 1) {
      throw new BadRequestException("# of applicationTypes = " + types.size()
          + ", we temporarily support at most one applicationType");
    }
    // if no states, returns the counts of all RMAppStates
    if (states.size() == 0) {
      for (YarnApplicationState state : YarnApplicationState.values()) {
        states.add(state.toString().toLowerCase());
      }
    }
    // in case we extend to multiple applicationTypes in the future
    Map<YarnApplicationState, Map<String, Long>> scoreboard =
        buildScoreboard(states, types);

    // go through the apps in RM to count the numbers, ignoring the case of
    // the state/type name
    ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext().getRMApps();
    for (RMApp rmapp : apps.values()) {
      YarnApplicationState state = rmapp.createApplicationState();
      String type = rmapp.getApplicationType().trim().toLowerCase();
      if (states.contains(state.toString().toLowerCase())) {
        if (types.contains(ANY)) {
          countApp(scoreboard, state, ANY);
        } else if (types.contains(type)) {
          countApp(scoreboard, state, type);
        }
      }
    }

    // fill the response object
    ApplicationStatisticsInfo appStatInfo = new ApplicationStatisticsInfo();
    for (Map.Entry<YarnApplicationState, Map<String, Long>> partScoreboard
        : scoreboard.entrySet()) {
      for (Map.Entry<String, Long> statEntry
          : partScoreboard.getValue().entrySet()) {
        StatisticsItemInfo statItem = new StatisticsItemInfo(
            partScoreboard.getKey(), statEntry.getKey(), statEntry.getValue());
        appStatInfo.add(statItem);
      }
    }
    return appStatInfo;
  }

  /**
   * Splits comma-separated query values into a lower-cased set; when
   * {@code isState} is true, each token is first validated against
   * {@link YarnApplicationState} and an invalid token raises
   * {@link BadRequestException}.
   */
  private static Set<String> parseQueries(Set<String> queries,
      boolean isState) {
    Set<String> params = new HashSet<String>();
    if (!queries.isEmpty()) {
      for (String query : queries) {
        if (query != null && !query.trim().isEmpty()) {
          String[] paramStrs = query.split(",");
          for (String paramStr : paramStrs) {
            if (paramStr != null && !paramStr.trim().isEmpty()) {
              if (isState) {
                try {
                  // enum string is in the uppercase
                  YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
                } catch (RuntimeException e) {
                  YarnApplicationState[] stateArray =
                      YarnApplicationState.values();
                  String allAppStates = Arrays.toString(stateArray);
                  throw new BadRequestException("Invalid application-state "
                      + paramStr.trim() + " specified. It should be one of "
                      + allAppStates);
                }
              }
              params.add(paramStr.trim().toLowerCase());
            }
          }
        }
      }
    }
    return params;
  }

  /**
   * Builds an empty (state x type) -> 0 count matrix for the given
   * lower-cased state names and type names.
   */
  private static Map<YarnApplicationState, Map<String, Long>> buildScoreboard(
      Set<String> states, Set<String> types) {
    Map<YarnApplicationState, Map<String, Long>> scoreboard =
        new HashMap<YarnApplicationState, Map<String, Long>>();
    // default states will result in enumerating all YarnApplicationStates
    assert !states.isEmpty();
    for (String state : states) {
      Map<String, Long> partScoreboard = new HashMap<String, Long>();
      scoreboard.put(
          YarnApplicationState.valueOf(state.toUpperCase()), partScoreboard);
      // types is verified not to be empty
      for (String type : types) {
        partScoreboard.put(type, 0L);
      }
    }
    return scoreboard;
  }

  /** Increments the scoreboard counter for the given (state, type) cell. */
  private static void countApp(
      Map<YarnApplicationState, Map<String, Long>> scoreboard,
      YarnApplicationState state, String type) {
    Map<String, Long> partScoreboard = scoreboard.get(state);
    Long count = partScoreboard.get(type);
    partScoreboard.put(type, count + 1L);
  }

  /** GET /apps/{appid} — a single application. */
  @GET
  @Path("/apps/{appid}")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public AppInfo getApp(@Context HttpServletRequest hsr,
      @PathParam("appid") String appId) {
    init();
    if (appId == null || appId.isEmpty()) {
      throw new NotFoundException("appId, " + appId + ", is empty or null");
    }
    ApplicationId id;
    id = ConverterUtils.toApplicationId(recordFactory, appId);
    if (id == null) {
      throw new NotFoundException("appId is null");
    }
    RMApp app = rm.getRMContext().getRMApps().get(id);
    if (app == null) {
      throw new NotFoundException("app with id: " + appId + " not found");
    }
    return new AppInfo(app, hasAccess(app, hsr), hsr.getScheme() + "://");
  }

  /** GET /apps/{appid}/appattempts — all attempts of an application. */
  @GET
  @Path("/apps/{appid}/appattempts")
  @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML})
  public AppAttemptsInfo getAppAttempts(@PathParam("appid") String appId) {
    init();
    if (appId == null || appId.isEmpty()) {
      throw new NotFoundException("appId, " + appId + ", is empty or null");
    }
    ApplicationId id;
    id = ConverterUtils.toApplicationId(recordFactory, appId);
    if (id == null) {
      throw new NotFoundException("appId is null");
    }
    RMApp app = rm.getRMContext().getRMApps().get(id);
    if (app == null) {
      throw new NotFoundException("app with id: " + appId + " not found");
    }
    AppAttemptsInfo appAttemptsInfo = new AppAttemptsInfo();
    for (RMAppAttempt attempt : app.getAppAttempts().values()) {
      AppAttemptInfo attemptInfo = new AppAttemptInfo(attempt, app.getUser());
      appAttemptsInfo.add(attemptInfo);
    }
    return appAttemptsInfo;
  }
}
package hudson.plugins.analysis.test;

import static junit.framework.Assert.*;

import hudson.XmlFile;
import hudson.plugins.analysis.Messages;
import hudson.plugins.analysis.util.model.AbstractAnnotation;
import hudson.plugins.analysis.util.model.FileAnnotation;
import hudson.plugins.analysis.util.model.JavaPackage;
import hudson.plugins.analysis.util.model.JavaProject;
import hudson.plugins.analysis.util.model.LineRange;
import hudson.plugins.analysis.util.model.MavenModule;
import hudson.plugins.analysis.util.model.Priority;
import hudson.plugins.analysis.util.model.WorkspaceFile;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URISyntaxException;
import java.util.Collection;

import org.apache.commons.lang.StringUtils;
import org.junit.Test;

/**
 * Tests the serialization of the annotation model: builds a known project
 * tree, round-trips it through Java serialization (and, via
 * {@link #ensureSerialization(String)}, through XStream XML files recorded by
 * older plugin versions), and verifies the deserialized tree in detail.
 *
 * @see <a href="http://www.ibm.com/developerworks/library/j-serialtest.html">Testing object serialization</a>
 */
public abstract class AbstractSerializeModelTest extends AbstractEnglishLocaleTest {
    /** Task property. */
    protected static final String MODULE2 = "Module2";
    /** Task property. */
    protected static final String MODULE1 = "Module1";
    /** Task property. */
    protected static final String PACKAGE2 = "Package2";
    /** Task property. */
    protected static final String PACKAGE1 = "Package1";
    /** Task property. */
    protected static final String PATH_TO_FILE2 = "Path/To/File2";
    /** Task property. */
    protected static final String PATH_TO_FILE1 = "Path/To/File";
    /** Short file name (base name of {@link #PATH_TO_FILE1}). */
    private static final String FILE1 = StringUtils.substringAfterLast(PATH_TO_FILE1, "/");
    /** Task property. Mixed case on purpose: priority lookup is case-insensitive. */
    protected static final String LOW = "Low";
    /** Task property. */
    protected static final String NORMAL = "NORMAL";
    /** Task property. */
    protected static final String HIGH = "high";
    /** Task property. */
    protected static final String TEST_TASK5 = "Test Task5";
    /** Task property. */
    protected static final String TEST_TASK4 = "Test Task4";
    /** Task property. */
    protected static final String TEST_TASK3 = "Test Task3";
    /** Task property. */
    protected static final String TEST_TASK2 = "Test Task2";
    /** Task property. */
    protected static final String TEST_TASK1 = "Test Task1";
    /** Task property. */
    protected static final int LINE_NUMBER = 100;
    /** Error Message. */
    private static final String WRONG_PACKAGE_NAME = "Wrong package name.";
    /** Error Message. */
    private static final String WRONG_TOOLTIP_CREATED = "Wrong tooltip created.";
    /** Error Message. */
    private static final String WRONG_NUMBER_OF_FILES = "Wrong number of files.";
    /** Error Message. */
    private static final String WRONG_NUMBER_OF_PACKAGES = "Wrong number of packages.";
    /** Error Message. */
    private static final String WRONG_NUMBER_OF_MODULES = "Wrong number of modules.";
    /** Error Message. */
    private static final String WRONG_NUMBER_OF_ANNOTATIONS = "Wrong number of annotations.";
    /** Error Message. */
    private static final String WRONG_MODULE_ERROR = "Wrong module error.";
    /** Error Message. */
    private static final String WRONG_ANNOTATION_KEY = "Wrong annotation key.";
    /** Error Message. */
    private static final String WRONG_MODULE_NAME = "Wrong module name.";
    /** Error Message. */
    private static final String WRONG_FILE_SHORT_NAME = "Wrong file short name";
    /** Error Message. */
    private static final String WRONG_FILE_NAME = "Wrong file name.";
    /** Error Message. */
    private static final String MODULE_NOT_IN_PROJECT = "Module not in project.";
    /** Error Message. */
    private static final String PACKAGE_NOT_IN_MODULE = "Package not in module.";

    /** The first created annotation; remembered so it can be looked up by key. */
    private AbstractAnnotation firstAnnotation;

    /**
     * Creates the original object that will be serialized: 5 annotations
     * spread over 2 modules, 2 packages and 2 files, then self-checks it.
     *
     * @return the annotation container
     */
    private JavaProject createOriginal() {
        JavaProject project = new JavaProject();
        addAnnotation(project, LINE_NUMBER, TEST_TASK1, Priority.HIGH, PATH_TO_FILE1, PACKAGE1, MODULE1);
        addAnnotation(project, LINE_NUMBER, TEST_TASK2, Priority.LOW, PATH_TO_FILE1, PACKAGE1, MODULE1);
        addAnnotation(project, LINE_NUMBER, TEST_TASK3, Priority.LOW, PATH_TO_FILE2, PACKAGE1, MODULE1);
        addAnnotation(project, LINE_NUMBER, TEST_TASK4, Priority.NORMAL, PATH_TO_FILE1, PACKAGE2, MODULE1);
        addAnnotation(project, LINE_NUMBER, TEST_TASK5, Priority.NORMAL, PATH_TO_FILE1, PACKAGE1, MODULE2);

        // Verify the freshly built tree so that failures after the round-trip
        // can only come from (de)serialization itself.
        verifyProject(project);
        verifyFirstAnnotation(project);

        // Developer helper kept for regenerating the recorded .ser fixture:
        // try {
        //     OutputStream fout = new FileOutputStream("/home/hafner/project.ser");
        //     ObjectOutputStream out = new ObjectOutputStream(fout);
        //
        //     out.writeObject(project);
        //     out.flush();
        //     out.close();
        // }
        // catch (FileNotFoundException exception) {
        //     // ignore
        // }
        // catch (IOException exception) {
        //     // ignore
        // }

        return project;
    }

    /**
     * Creates a new object from the serialization data stored in the specified
     * file (loaded from the test classpath).
     *
     * @param fileName
     *            the file to get the data from
     * @return the deserialized object
     * @param <T> type of object
     */
    @SuppressWarnings("unchecked")
    @edu.umd.cs.findbugs.annotations.SuppressWarnings("UI")
    protected <T> T deserialize(final String fileName) {
        try {
            // NOTE(review): getResourceAsStream returns null for a missing
            // resource, which would surface as an NPE here — TODO confirm all
            // callers pass existing fixture names.
            InputStream inputStream = getClass().getResourceAsStream(fileName);
            return (T)readFrom(new ObjectInputStream(inputStream));
        }
        catch (IOException exception) {
            throw new IllegalArgumentException(exception);
        }
        catch (ClassNotFoundException exception) {
            throw new IllegalArgumentException(exception);
        }
    }

    /** Reads one object from the stream, always closing the stream. */
    private Object readFrom(final ObjectInputStream objectStream) throws IOException, ClassNotFoundException {
        try {
            return objectStream.readObject();
        }
        finally {
            objectStream.close();
        }
    }

    /**
     * Verifies the first annotation of the project: lookup by long key and by
     * string key must return the same instance, and an equal annotation built
     * from scratch must compare equal.
     *
     * @param project the created project
     */
    @SuppressWarnings("PMD")
    protected void verifyFirstAnnotation(final JavaProject project) {
        FileAnnotation annotation = project.getAnnotation(firstAnnotation.getKey());
        assertEquals(WRONG_ANNOTATION_KEY, firstAnnotation, annotation);
        annotation = project.getAnnotation(String.valueOf(firstAnnotation.getKey()));
        assertEquals(WRONG_ANNOTATION_KEY, firstAnnotation, annotation);

        verifyFirstAnnotation(firstAnnotation);

        JavaProject dummyProject = new JavaProject();
        addAnnotation(dummyProject, LINE_NUMBER, TEST_TASK1, Priority.HIGH, PATH_TO_FILE1, PACKAGE1, MODULE1);
        FileAnnotation other = dummyProject.getAnnotations().iterator().next();
        assertEquals("Wrong equals evaluation.", annotation, other);
    }

    /**
     * Verifies the first created annotation.
     *
     * @param annotation
     *            the first created annotation
     */
    protected abstract void verifyFirstAnnotation(final AbstractAnnotation annotation);

    /**
     * Adds a new task to the specified project and remembers the very first
     * one in {@link #firstAnnotation}.
     *
     * @param project
     *            the project to add the tasks to
     * @param line
     *            the line
     * @param message
     *            the message
     * @param priority
     *            the priority
     * @param fileName
     *            the file name
     * @param packageName
     *            the package name
     * @param moduleName
     *            the module name
     */
    private void addAnnotation(final JavaProject project, final int line, final String message, final Priority priority,
            final String fileName, final String packageName, final String moduleName) {
        AbstractAnnotation task = createAnnotation(line, message, priority, fileName, packageName, moduleName);
        if (firstAnnotation == null) {
            firstAnnotation = task;
        }
        project.addAnnotation(task);
    }

    /**
     * Creates an annotation (subclasses supply the concrete annotation type).
     *
     * @param line
     *            the line
     * @param message
     *            the message
     * @param priority
     *            the priority
     * @param fileName
     *            the file name
     * @param packageName
     *            the package name
     * @param moduleName
     *            the module name
     * @return the annotation
     */
    protected abstract AbstractAnnotation createAnnotation(final int line, final String message, final Priority priority,
            final String fileName, final String packageName, final String moduleName);

    /**
     * Test whether a serialized task is the same object after deserialization.
     *
     * @throws Exception Signals a test failure
     */
    @Test
    public void testObjectIsSameAfterDeserialization() throws Exception {
        JavaProject original = createOriginal();

        // Developer helper kept for regenerating the XML fixture:
        // Collection<FileAnnotation> files = original.getAnnotations();
        // createXmlFile(new File("/home/hafner/project.ser.xml")).write(files.toArray(new FileAnnotation[files.size()]));

        ByteArrayOutputStream outputStream = serialize(original);
        JavaProject copy = deserialize(outputStream.toByteArray());

        verifyProject(copy);
        verifyFirstAnnotation(copy);
    }

    /**
     * Creates the XML serialization file.
     *
     * @param file the file for the XML data
     * @return the XML serialization file
     */
    protected abstract XmlFile createXmlFile(final File file);

    /**
     * Deserializes an object from the specified data and returns it.
     *
     * @param objectData
     *            the serialized object in plain bytes
     * @return the deserialized object
     * @throws IOException
     *             in case of an IO error
     * @throws ClassNotFoundException
     *             if the wrong class is created
     */
    private JavaProject deserialize(final byte[] objectData) throws IOException, ClassNotFoundException {
        InputStream inputStream = new ByteArrayInputStream(objectData);
        ObjectInputStream objectStream = new ObjectInputStream(inputStream);
        Object readObject = objectStream.readObject();

        return (JavaProject) readObject;
    }

    /**
     * Serializes the specified object and returns the created output stream.
     *
     * @param original
     *            original object
     * @return created output stream
     * @throws IOException
     *             if the file could not be written
     */
    private ByteArrayOutputStream serialize(final JavaProject original) throws IOException {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        ObjectOutputStream objectStream = new ObjectOutputStream(outputStream);
        objectStream.writeObject(original);
        objectStream.close();

        return outputStream;
    }

    /**
     * Verifies the created project: summary counts, both modules, and key
     * lookups for every annotation of module 2.
     *
     * @param project the created project
     */
    @SuppressWarnings("PMD")
    protected void verifyProject(final JavaProject project) {
        assertTrue(project.hasAnnotations());
        checkSummary(project);

        assertTrue(MODULE_NOT_IN_PROJECT, project.containsModule(MODULE1));
        assertTrue(MODULE_NOT_IN_PROJECT, project.containsModule(MODULE2));
        checkFirstModule(project.getModule(MODULE1));
        checkSecondModule(project.getModule(MODULE2));

        for (FileAnnotation annotation : project.getModule(MODULE2).getAnnotations()) {
            assertEquals("Wrong primary line number.", LINE_NUMBER, annotation.getPrimaryLineNumber());
            Collection<LineRange> lineRanges = annotation.getLineRanges();
            assertEquals("Wrong number of ranges.", 1, lineRanges.size());
            LineRange range = lineRanges.iterator().next();
            assertEquals("Wrong start line number.", LINE_NUMBER, range.getStart());
            assertEquals("Wrong end line number.", LINE_NUMBER, range.getEnd());
            assertEquals("Wrong package prefix.", Messages.PackageDetail_header(), project.getModule(MODULE2).getPackageCategoryName());
            // Key lookups must return the identical instance, by long and by string key.
            assertSame(annotation, project.getAnnotation(annotation.getKey()));
            assertSame(annotation, project.getAnnotation(Long.toString(annotation.getKey())));
        }
    }

    /**
     * Checks the second module of the project (1 normal-priority annotation in
     * PACKAGE1/FILE1).
     *
     * @param module
     *            the module to check
     */
    private void checkSecondModule(final MavenModule module) {
        assertNull(WRONG_MODULE_ERROR, module.getError());
        assertEquals(WRONG_MODULE_NAME, MODULE2, module.getName());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getNumberOfAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getAnnotations().size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, module.getNumberOfAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, module.getAnnotations(Priority.HIGH).size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getNumberOfAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getAnnotations(Priority.NORMAL).size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, module.getNumberOfAnnotations(Priority.LOW));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, module.getAnnotations(Priority.LOW).size());
        assertEquals(WRONG_TOOLTIP_CREATED, "Normal:1", module.getToolTip());
        assertEquals(WRONG_NUMBER_OF_PACKAGES, 1, module.getPackages().size());
        assertEquals(WRONG_NUMBER_OF_FILES, 1, module.getFiles().size());

        assertTrue(PACKAGE_NOT_IN_MODULE, module.containsPackage(PACKAGE1));
        assertFalse("Package in module.", module.containsPackage(PACKAGE2));
        JavaPackage javaPackage = module.getPackage(PACKAGE1);
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, javaPackage.getNumberOfAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, javaPackage.getNumberOfAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, javaPackage.getNumberOfAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, javaPackage.getNumberOfAnnotations(Priority.LOW));

        WorkspaceFile file = module.getFile(PATH_TO_FILE1);
        assertEquals(WRONG_FILE_NAME, PATH_TO_FILE1, file.getName());
        assertEquals(WRONG_FILE_SHORT_NAME, FILE1, file.getShortName());
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, file.getNumberOfAnnotations());
        // Priority lookup by name is exercised with mixed-case strings on purpose.
        assertFalse(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(HIGH));
        assertFalse(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, file.getNumberOfAnnotations(Priority.HIGH));
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(NORMAL));
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, file.getNumberOfAnnotations(Priority.NORMAL));
        assertFalse(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(LOW));
        assertFalse(WRONG_NUMBER_OF_ANNOTATIONS, file.hasAnnotations(Priority.LOW));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, file.getNumberOfAnnotations(Priority.LOW));
        assertEquals(WRONG_TOOLTIP_CREATED, "Normal:1", file.getToolTip());
    }

    /**
     * Checks the first module of the project (4 annotations: 1 high, 1 normal,
     * 2 low, over 2 packages and 2 files).
     *
     * @param module
     *            the module to check
     */
    private void checkFirstModule(final MavenModule module) {
        assertEquals(WRONG_MODULE_NAME, MODULE1, module.getName());
        assertNull(WRONG_MODULE_ERROR, module.getError());
        assertEquals(WRONG_NUMBER_OF_MODULES, 0, module.getModules().size());
        assertEquals(WRONG_NUMBER_OF_PACKAGES, 2, module.getPackages().size());
        assertEquals(WRONG_NUMBER_OF_FILES, 2, module.getFiles().size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 4, module.getNumberOfAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getNumberOfAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, module.getNumberOfAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 2, module.getNumberOfAnnotations(Priority.LOW));

        assertTrue(PACKAGE_NOT_IN_MODULE, module.containsPackage(PACKAGE1));
        JavaPackage javaPackage = module.getPackage(PACKAGE1);
        assertEquals(WRONG_PACKAGE_NAME, PACKAGE1, javaPackage.getName());
        assertEquals(WRONG_NUMBER_OF_MODULES, 0, javaPackage.getModules().size());
        assertEquals(WRONG_NUMBER_OF_PACKAGES, 0, javaPackage.getPackages().size());
        assertEquals(WRONG_NUMBER_OF_FILES, 2, javaPackage.getFiles().size());
        // NOTE(review): the next line duplicates the previous assertion —
        // presumably a copy/paste leftover; harmless, kept for fidelity.
        assertEquals(WRONG_NUMBER_OF_FILES, 2, javaPackage.getFiles().size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 3, javaPackage.getNumberOfAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, javaPackage.getNumberOfAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, javaPackage.getNumberOfAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 2, javaPackage.getNumberOfAnnotations(Priority.LOW));

        assertTrue("File not in package.", javaPackage.containsFile(PATH_TO_FILE1));
        WorkspaceFile file = javaPackage.getFile(PATH_TO_FILE1);
        assertEquals(WRONG_FILE_NAME, PATH_TO_FILE1, file.getName());
        assertEquals(WRONG_FILE_SHORT_NAME, FILE1, file.getShortName());
        assertEquals(WRONG_NUMBER_OF_MODULES, 0, file.getModules().size());
        assertEquals(WRONG_NUMBER_OF_PACKAGES, 0, file.getPackages().size());
        assertEquals(WRONG_NUMBER_OF_FILES, 0, file.getFiles().size());

        javaPackage = module.getPackage(PACKAGE2);
        assertEquals(WRONG_PACKAGE_NAME, PACKAGE2, javaPackage.getName());
        assertEquals(WRONG_NUMBER_OF_FILES, 1, javaPackage.getFiles().size());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, javaPackage.getNumberOfAnnotations());
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, javaPackage.getNumberOfAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, javaPackage.getNumberOfAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 0, javaPackage.getNumberOfAnnotations(Priority.LOW));
    }

    /**
     * Checks the summary information of the project (5 annotations total:
     * 1 high, 2 normal, 2 low).
     *
     * @param project
     *            the project to check
     */
    private void checkSummary(final JavaProject project) {
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 5, project.getNumberOfAnnotations());
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, project.hasAnnotations(Priority.HIGH));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 1, project.getNumberOfAnnotations(Priority.HIGH));
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, project.hasAnnotations(Priority.NORMAL));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 2, project.getNumberOfAnnotations(Priority.NORMAL));
        assertTrue(WRONG_NUMBER_OF_ANNOTATIONS, project.hasAnnotations(Priority.LOW));
        assertEquals(WRONG_NUMBER_OF_ANNOTATIONS, 2, project.getNumberOfAnnotations(Priority.LOW));
        assertEquals(WRONG_TOOLTIP_CREATED, "High:1 - Normal:2 - Low:2", project.getToolTip());
        assertEquals(WRONG_NUMBER_OF_MODULES, 2, project.getModules().size());
        assertEquals(WRONG_NUMBER_OF_PACKAGES, 2, project.getPackages().size());
        assertEquals(WRONG_NUMBER_OF_FILES, 2, project.getFiles().size());
    }

    /**
     * Verifies that the specified XML file is a valid serialization of the
     * project (backward-compatibility check against a recorded fixture).
     *
     * @param fileName
     *            the file name
     */
    protected void ensureSerialization(final String fileName) {
        try {
            File file = new File(getClass().getResource(fileName).toURI());
            XmlFile xmlFile = createXmlFile(file);
            Object deserialized = xmlFile.read();

            FileAnnotation[] files = (FileAnnotation[]) deserialized;
            JavaProject project = new JavaProject();
            project.addAnnotations(files);

            verifyProject(project);
        }
        catch (URISyntaxException exception) {
            throw new IllegalArgumentException(exception);
        }
        catch (IOException exception) {
            throw new IllegalArgumentException(exception);
        }
    }
}
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

package org.tensorflow.lite.support.tensorbuffer;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
import org.checkerframework.checker.nullness.qual.NonNull;
import org.tensorflow.lite.DataType;
import org.tensorflow.lite.support.common.SupportPrecondtions;

/** Represents the data buffer for either a model's input or its output. */
public abstract class TensorBuffer {
  /** Where the data is stored. */
  protected ByteBuffer buffer;

  /** Shape of the tensor stored in this buffer. */
  protected int[] shape;

  /** Number of elements in the buffer. It will be changed to a proper value in the constructor. */
  protected int flatSize = -1;

  /**
   * Indicator of whether this buffer is dynamic or fixed-size. Fixed-size buffers will have
   * pre-allocated memory and fixed size. While the size of dynamic buffers can be changed.
   */
  protected final boolean isDynamic;

  /**
   * Creates a {@link TensorBuffer} with specified {@code shape} and {@link DataType}. Here are some
   * examples:
   *
   * <pre>
   * Creating a float TensorBuffer with shape {2, 3}:
   * int[] shape = new int[] {2, 3};
   * TensorBuffer tensorBuffer = TensorBuffer.createFixedSize(shape, DataType.FLOAT32);
   * </pre>
   *
   * <pre>
   * Creating an uint8 TensorBuffer of a scalar:
   * int[] shape = new int[] {};
   * TensorBuffer tensorBuffer = TensorBuffer.createFixedSize(shape, DataType.UINT8);
   * </pre>
   *
   * <pre>
   * Creating an empty uint8 TensorBuffer:
   * int[] shape = new int[] {0};
   * TensorBuffer tensorBuffer = TensorBuffer.createFixedSize(shape, DataType.UINT8);
   * </pre>
   *
   * <p>The size of a fixed-size TensorBuffer cannot be changed once it is created. However, loading
   * arrays or data buffers of the same buffer size but different shapes is allowed.
   *
   * <p>TODO(b/139782181): Shall we make it fixed-size or fixed-shape?
   *
   * @param shape The shape of the {@link TensorBuffer} to be created.
   * @param dataType The dataType of the {@link TensorBuffer} to be created.
   * @throws NullPointerException if {@code shape} is null.
   * @throws IllegalArgumentException if {@code shape} has non-positive elements.
   */
  @NonNull
  public static TensorBuffer createFixedSize(@NonNull int[] shape, DataType dataType) {
    switch (dataType) {
      case FLOAT32:
        return new TensorBufferFloat(shape);
      case UINT8:
        return new TensorBufferUint8(shape);
      default:
        throw new AssertionError("TensorBuffer does not support data type: " + dataType);
    }
  }

  /**
   * Creates an empty dynamic {@link TensorBuffer} with specified {@link DataType}. The shape of the
   * created {@link TensorBuffer} is {0}.
   *
   * <p>Dynamic TensorBuffers will reallocate memory when loading arrays or data buffers of
   * different buffer sizes.
   *
   * @param dataType The dataType of the {@link TensorBuffer} to be created.
   */
  @NonNull
  public static TensorBuffer createDynamic(DataType dataType) {
    switch (dataType) {
      case FLOAT32:
        return new TensorBufferFloat();
      case UINT8:
        return new TensorBufferUint8();
      default:
        throw new AssertionError("TensorBuffer does not support data type: " + dataType);
    }
  }

  /**
   * Creates a {@link TensorBuffer} deep-copying data from another, with specified {@link DataType}.
   *
   * @param buffer the source {@link TensorBuffer} to copy from.
   * @param dataType the expected {@link DataType} of newly created {@link TensorBuffer}.
   * @throws NullPointerException if {@code buffer} is null.
   */
  @NonNull
  public static TensorBuffer createFrom(@NonNull TensorBuffer buffer, DataType dataType) {
    SupportPrecondtions.checkNotNull(buffer, "Cannot create a buffer from null");
    TensorBuffer result;
    // Preserve the source's sizing behavior (dynamic vs. fixed).
    if (buffer.isDynamic()) {
      result = createDynamic(dataType);
    } else {
      result = createFixedSize(buffer.shape, dataType);
    }
    // The only scenario we need float array is FLOAT32->FLOAT32, or we can always use INT as
    // intermediate container.
    // The assumption is not true when we support other data types.
    if (buffer.getDataType() == DataType.FLOAT32 && dataType == DataType.FLOAT32) {
      float[] data = buffer.getFloatArray();
      result.loadArray(data, buffer.shape);
    } else {
      int[] data = buffer.getIntArray();
      result.loadArray(data, buffer.shape);
    }
    return result;
  }

  /** Returns the data buffer. */
  @NonNull
  public ByteBuffer getBuffer() {
    return buffer;
  }

  /** Gets the {@link TensorBuffer#flatSize} of the buffer. */
  public int getFlatSize() {
    return flatSize;
  }

  /** Gets the current shape. (returning a copy here to avoid unexpected modification.) */
  @NonNull
  public int[] getShape() {
    return Arrays.copyOf(shape, shape.length);
  }

  /** Returns the data type of this buffer. */
  public abstract DataType getDataType();

  /**
   * Returns a float array of the values stored in this buffer. If the buffer is of different types
   * than float, the values will be converted into float. For example, values in {@link
   * TensorBufferUint8} will be converted from uint8 to float.
   */
  @NonNull
  public abstract float[] getFloatArray();

  /**
   * Returns an int array of the values stored in this buffer. If the buffer is of different type
   * than int, the values will be converted into int, and loss of precision may apply. For example,
   * getting an int array from a {@link TensorBufferFloat} with values {400.32f, 23.04f}, the output
   * is {400, 23}.
   */
  @NonNull
  public abstract int[] getIntArray();

  /**
   * Returns the number of bytes of a single element in the array. For example, a float buffer will
   * return 4, and a byte buffer will return 1.
   */
  public abstract int getTypeSize();

  /** Returns if the TensorBuffer is dynamic sized (could resize arbitrarily). */
  public boolean isDynamic() {
    return isDynamic;
  }

  /**
   * Loads an int array into this buffer with specific shape. If the buffer is of different types
   * than int, the values will be converted into the buffer's type before being loaded into the
   * buffer, and loss of precision may apply. For example, loading an int array with values {400,
   * -23} into a {@link TensorBufferUint8} , the values will be clamped to [0, 255] and then be
   * casted to uint8 by {255, 0}.
   *
   * @param src The source array to be loaded.
   * @param shape Shape of the tensor that {@code src} represents.
   * @throws NullPointerException if {@code src} is null.
   * @throws NullPointerException if {@code shape} is null.
   * @throws IllegalArgumentException if the size of the array to be loaded does not match the
   *     specified shape.
   */
  public abstract void loadArray(@NonNull int[] src, @NonNull int[] shape);

  /**
   * Loads an int array into this buffer. If the buffer is of different types than int, the values
   * will be converted into the buffer's type before being loaded into the buffer, and loss of
   * precision may apply. For example, loading an int array with values {400, -23} into a {@link
   * TensorBufferUint8} , the values will be clamped to [0, 255] and then be casted to uint8 by
   * {255, 0}.
   *
   * <p>Size of {@code src} should always match the flat size of this {@link TensorBuffer}, for both
   * fixed-size and dynamic {@link TensorBuffer}.
   *
   * @param src The source array to be loaded.
   */
  public void loadArray(@NonNull int[] src) {
    loadArray(src, shape);
  }

  /**
   * Loads a float array into this buffer with specific shape. If the buffer is of different types
   * than float, the values will be converted into the buffer's type before being loaded into the
   * buffer, and loss of precision may apply. For example, loading a float array into a {@link
   * TensorBufferUint8} with values {400.32f, -23.04f}, the values will be clamped to [0, 255] and
   * then be casted to uint8 by {255, 0}.
   *
   * @param src The source array to be loaded.
   * @param shape Shape of the tensor that {@code src} represents.
   * @throws NullPointerException if {@code src} is null.
   * @throws NullPointerException if {@code shape} is null.
   * @throws IllegalArgumentException if the size of the array to be loaded does not match the
   *     specified shape.
   */
  public abstract void loadArray(@NonNull float[] src, @NonNull int[] shape);

  /**
   * Loads a float array into this buffer. If the buffer is of different types than float, the
   * values will be converted into the buffer's type before being loaded into the buffer, and loss
   * of precision may apply. For example, loading a float array into a {@link TensorBufferUint8}
   * with values {400.32f, -23.04f}, the values will be clamped to [0, 255] and then be casted to
   * uint8 by {255, 0}.
   *
   * <p>Size of {@code src} should always match the flat size of this {@link TensorBuffer}, for both
   * fixed-size and dynamic {@link TensorBuffer}.
   *
   * @param src The source array to be loaded.
   */
  public void loadArray(@NonNull float[] src) {
    loadArray(src, shape);
  }

  /**
   * Loads a byte buffer into this {@link TensorBuffer} with specific shape.
   *
   * <p>Important: The loaded buffer is a reference. DO NOT MODIFY. We don't create a copy here for
   * performance concern, but if modification is necessary, please make a copy.
   *
   * @param buffer The byte buffer to load.
   * @param shape Shape of the tensor stored in {@code buffer}.
   * @throws NullPointerException if {@code buffer} is null.
   * @throws IllegalArgumentException if the size of {@code buffer} and {@code typeSize} do not
   *     match or the size of {@code buffer} and {@code flatSize} do not match.
   */
  public void loadBuffer(@NonNull ByteBuffer buffer, @NonNull int[] shape) {
    SupportPrecondtions.checkNotNull(buffer, "Byte buffer cannot be null.");
    int flatSize = computeFlatSize(shape);
    SupportPrecondtions.checkArgument(
        (buffer.limit() == getTypeSize() * flatSize),
        "The size of byte buffer and the shape do not match.");

    if (!isDynamic) {
      // Fixed-size buffers may change shape but never total element count.
      SupportPrecondtions.checkArgument(
          flatSize == this.flatSize,
          "The size of byte buffer and the size of the tensor buffer do not match.");
    } else {
      this.flatSize = flatSize;
    }

    this.shape = shape.clone();
    // Rewind so subsequent reads start at position 0; the buffer itself is
    // stored by reference, not copied.
    buffer.rewind();
    this.buffer = buffer;
  }

  /**
   * Loads a byte buffer into this {@link TensorBuffer}. Buffer size must match the flat size of
   * this {@link TensorBuffer}.
   *
   * <p>Important: The loaded buffer is a reference. DO NOT MODIFY. We don't create a copy here for
   * performance concern, but if modification is necessary, please make a copy.
   *
   * @param buffer The byte buffer to load.
   */
  public void loadBuffer(@NonNull ByteBuffer buffer) {
    loadBuffer(buffer, shape);
  }

  /**
   * Constructs a fixed size {@link TensorBuffer} with specified {@code shape}.
   *
   * @throws NullPointerException if {@code shape} is null.
   * @throws IllegalArgumentException if {@code shape} has non-positive elements.
   */
  protected TensorBuffer(@NonNull int[] shape) {
    isDynamic = false;
    allocateMemory(shape);
  }

  /** Constructs a dynamic {@link TensorBuffer} which can be resized. */
  protected TensorBuffer() {
    isDynamic = true;
    // Initialize the dynamic TensorBuffer with an empty ByteBuffer.
    allocateMemory(new int[] {0});
  }

  /** Calculates number of elements in the buffer (product of all dimensions; 1 for a scalar). */
  protected static int computeFlatSize(@NonNull int[] shape) {
    SupportPrecondtions.checkNotNull(shape, "Shape cannot be null.");
    int prod = 1;
    for (int s : shape) {
      prod = prod * s;
    }
    return prod;
  }

  /**
   * For dynamic buffer, resize the memory if needed. For fixed-size buffer, check if the {@code
   * shape} of src fits the buffer size.
   */
  protected void resize(@NonNull int[] shape) {
    if (isDynamic) {
      allocateMemory(shape);
    } else {
      // Make sure the new shape fits the buffer size when TensorBuffer has fixed size.
      SupportPrecondtions.checkArgument(flatSize == computeFlatSize(shape));
      this.shape = shape.clone();
    }
  }

  /**
   * Allocates buffer with corresponding size of the {@code shape}. If shape is an empty array, this
   * TensorBuffer will be created as a scalar and its flatSize will be 1.
   *
   * @throws NullPointerException if {@code shape} is null.
   * @throws IllegalArgumentException if {@code shape} has negative elements.
   */
  private void allocateMemory(@NonNull int[] shape) {
    SupportPrecondtions.checkNotNull(shape, "TensorBuffer shape cannot be null.");
    SupportPrecondtions.checkArgument(
        isShapeValid(shape), "Values in TensorBuffer shape should be non-negative.");

    // Check if the new shape is the same as current shape.
    int newFlatSize = computeFlatSize(shape);
    if (flatSize == newFlatSize) {
      return;
    }

    // Update to the new shape.
    flatSize = newFlatSize;
    this.shape = shape.clone();
    // Direct, native-order allocation so the buffer can be handed to the
    // TFLite runtime without copying.
    buffer = ByteBuffer.allocateDirect(flatSize * getTypeSize());
    buffer.order(ByteOrder.nativeOrder());
  }

  /**
   * Checks if {@code shape} meets one of following two requirements: 1. Elements in {@code shape}
   * are all non-negative numbers. 2. {@code shape} is an empty array, which corresponds to scalar.
   */
  private static boolean isShapeValid(@NonNull int[] shape) {
    if (shape.length == 0) {
      // This shape refers to a scalar.
      return true;
    }

    // This shape refers to a multidimensional array.
    for (int s : shape) {
      // All elements in shape should be non-negative.
      if (s < 0) {
        return false;
      }
    }
    return true;
  }
}
package ca.uhn.fhir.util;

/*
 * #%L
 * HAPI FHIR - Core Library
 * %%
 * Copyright (C) 2014 - 2017 University Health Network
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.HashMap;
import java.util.Map;

import javax.xml.namespace.NamespaceContext;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;

import org.apache.commons.lang3.StringUtils;

/**
 * An {@link XMLStreamWriter} decorator that pretty-prints the output of a wrapped writer:
 * each element is placed on its own line, indented three spaces per nesting level.
 * Content inside a {@code <pre>} element (at any nesting depth) is passed through verbatim
 * so that whitespace-significant narrative text is not reformatted.
 *
 * <p>Not thread-safe; intended for single-threaded serialization, like the underlying writer.</p>
 */
public class PrettyPrintWriterWrapper implements XMLStreamWriter {
   private static final String INDENT_CHAR = " ";
   private static final String LINEFEED_CHAR = "\n";
   private static final String PRE = "pre";

   // Current element nesting level; drives the indentation width.
   private int depth = 0;
   // For each depth, whether the element at that depth has at least one child element;
   // only then does its closing tag go on its own (indented) line.
   private Map<Integer, Boolean> hasChildElement = new HashMap<Integer, Boolean>();
   // Nesting count of <pre> elements; > 0 means "emit verbatim, no indentation".
   private int myInsidePre = 0;
   private XMLStreamWriter myTarget;
   // Suppresses the linefeed that would otherwise precede the very first element.
   private boolean myFirstIndent = true;

   public PrettyPrintWriterWrapper(XMLStreamWriter target) {
      myTarget = target;
   }

   @Override
   public void close() throws XMLStreamException {
      myTarget.close();
   }

   @Override
   public void flush() throws XMLStreamException {
      myTarget.flush();
   }

   @CoverageIgnore
   @Override
   public NamespaceContext getNamespaceContext() {
      return myTarget.getNamespaceContext();
   }

   @CoverageIgnore
   @Override
   public String getPrefix(String theUri) throws XMLStreamException {
      return myTarget.getPrefix(theUri);
   }

   @CoverageIgnore
   @Override
   public Object getProperty(String theName) throws IllegalArgumentException {
      return myTarget.getProperty(theName);
   }

   @CoverageIgnore
   @Override
   public void setDefaultNamespace(String theUri) throws XMLStreamException {
      myTarget.setDefaultNamespace(theUri);
   }

   @CoverageIgnore
   @Override
   public void setNamespaceContext(NamespaceContext theContext) throws XMLStreamException {
      myTarget.setNamespaceContext(theContext);
   }

   @CoverageIgnore
   @Override
   public void setPrefix(String thePrefix, String theUri) throws XMLStreamException {
      myTarget.setPrefix(thePrefix, theUri);
   }

   @Override
   public void writeAttribute(String theLocalName, String theValue) throws XMLStreamException {
      myTarget.writeAttribute(theLocalName, theValue);
   }

   @CoverageIgnore
   @Override
   public void writeAttribute(String theNamespaceURI, String theLocalName, String theValue) throws XMLStreamException {
      myTarget.writeAttribute(theNamespaceURI, theLocalName, theValue);
   }

   @CoverageIgnore
   @Override
   public void writeAttribute(String thePrefix, String theNamespaceURI, String theLocalName, String theValue) throws XMLStreamException {
      myTarget.writeAttribute(thePrefix, theNamespaceURI, theLocalName, theValue);
   }

   @CoverageIgnore
   @Override
   public void writeCData(String theData) throws XMLStreamException {
      myTarget.writeCData(theData);
   }

   @Override
   public void writeCharacters(char[] theText, int theStart, int theLen) throws XMLStreamException {
      // Shared character-normalization logic lives in the non-pretty wrapper.
      NonPrettyPrintWriterWrapper.writeCharacters(theText, theStart, theLen, myTarget, myInsidePre);
   }

   @Override
   public void writeCharacters(String theText) throws XMLStreamException {
      if (myInsidePre > 0) {
         // Inside <pre>: pass text through untouched.
         myTarget.writeCharacters(theText);
      } else {
         writeCharacters(theText.toCharArray(), 0, theText.length());
      }
   }

   @Override
   public void writeComment(String theData) throws XMLStreamException {
      indent();
      myTarget.writeComment(theData);
   }

   @Override
   public void writeDefaultNamespace(String theNamespaceURI) throws XMLStreamException {
      myTarget.writeDefaultNamespace(theNamespaceURI);
   }

   @CoverageIgnore
   @Override
   public void writeDTD(String theDtd) throws XMLStreamException {
      myTarget.writeDTD(theDtd);
   }

   @CoverageIgnore
   @Override
   public void writeEmptyElement(String theLocalName) throws XMLStreamException {
      indent();
      myTarget.writeEmptyElement(theLocalName);
   }

   @CoverageIgnore
   @Override
   public void writeEmptyElement(String theNamespaceURI, String theLocalName) throws XMLStreamException {
      indent();
      myTarget.writeEmptyElement(theNamespaceURI, theLocalName);
   }

   @CoverageIgnore
   @Override
   public void writeEmptyElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException {
      indent();
      myTarget.writeEmptyElement(thePrefix, theLocalName, theNamespaceURI);
   }

   @CoverageIgnore
   @Override
   public void writeEndDocument() throws XMLStreamException {
      decrementAndIndent();
      myTarget.writeEndDocument();
   }

   @Override
   public void writeEndElement() throws XMLStreamException {
      if (myInsidePre > 0) {
         myInsidePre--;
      }

      decrementAndIndent();

      myTarget.writeEndElement();
   }

   @CoverageIgnore
   @Override
   public void writeEntityRef(String theName) throws XMLStreamException {
      myTarget.writeEntityRef(theName);
   }

   @Override
   public void writeNamespace(String thePrefix, String theNamespaceURI) throws XMLStreamException {
      myTarget.writeNamespace(thePrefix, theNamespaceURI);
   }

   @CoverageIgnore
   @Override
   public void writeProcessingInstruction(String theTarget) throws XMLStreamException {
      myTarget.writeProcessingInstruction(theTarget);
   }

   @CoverageIgnore
   @Override
   public void writeProcessingInstruction(String theTarget, String theData) throws XMLStreamException {
      myTarget.writeProcessingInstruction(theTarget, theData);
   }

   @Override
   public void writeStartDocument() throws XMLStreamException {
      myFirstIndent = true;
      myTarget.writeStartDocument();
   }

   @Override
   public void writeStartDocument(String theVersion) throws XMLStreamException {
      myFirstIndent = true;
      myTarget.writeStartDocument(theVersion);
   }

   @Override
   public void writeStartDocument(String theEncoding, String theVersion) throws XMLStreamException {
      myFirstIndent = true;
      myTarget.writeStartDocument(theEncoding, theVersion);
   }

   @Override
   public void writeStartElement(String theLocalName) throws XMLStreamException {
      indentAndAdd();
      myTarget.writeStartElement(theLocalName);
      if (PRE.equals(theLocalName) || myInsidePre > 0) {
         myInsidePre++;
      }
   }

   @Override
   public void writeStartElement(String theNamespaceURI, String theLocalName) throws XMLStreamException {
      indentAndAdd();
      myTarget.writeStartElement(theNamespaceURI, theLocalName);
      if (PRE.equals(theLocalName) || myInsidePre > 0) {
         myInsidePre++;
      }
   }

   @Override
   public void writeStartElement(String thePrefix, String theLocalName, String theNamespaceURI) throws XMLStreamException {
      indentAndAdd();
      myTarget.writeStartElement(thePrefix, theLocalName, theNamespaceURI);
      if (PRE.equals(theLocalName) || myInsidePre > 0) {
         myInsidePre++;
      }
   }

   private void decrementAndIndent() throws XMLStreamException {
      if (myInsidePre > 0) {
         return;
      }
      depth--;
      // Boolean.TRUE.equals(..) guards against a null map entry. The previous form,
      // "hasChildElement.get(depth) == true", auto-unboxed the Boolean and threw a
      // NullPointerException when an end event arrived for a depth that never saw a
      // writeStartElement (e.g. writeEndDocument after the root element closed).
      if (Boolean.TRUE.equals(hasChildElement.get(depth))) {
         // indent for current depth
         myTarget.writeCharacters(LINEFEED_CHAR + repeat(depth, INDENT_CHAR));
      }
   }

   private void indent() throws XMLStreamException {
      if (myFirstIndent) {
         // Skip the linefeed before the very first element so output doesn't start blank.
         myFirstIndent = false;
         return;
      }
      myTarget.writeCharacters(LINEFEED_CHAR + repeat(depth, INDENT_CHAR));
   }

   private void indentAndAdd() throws XMLStreamException {
      if (myInsidePre > 0) {
         return;
      }
      indent();

      // update state of parent node
      if (depth > 0) {
         hasChildElement.put(depth - 1, true);
      }

      // reset state of current node
      hasChildElement.put(depth, false);

      depth++;
   }

   private String repeat(int d, String s) {
      // Three indent characters per nesting level.
      return StringUtils.repeat(s, d * 3);
   }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.work;

import com.codahale.metrics.Gauge;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
import org.apache.drill.common.SelfCleaningRunnable;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.coord.ClusterCoordinator;
import org.apache.drill.exec.metrics.DrillMetrics;
import org.apache.drill.exec.proto.BitControl.FragmentStatus;
import org.apache.drill.exec.proto.BitData;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.ExecProtos;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.proto.GeneralRPCProtos.Ack;
import org.apache.drill.exec.proto.UserBitShared.QueryId;
import org.apache.drill.exec.proto.helper.QueryIdHelper;
import org.apache.drill.exec.rpc.DrillRpcFuture;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.control.Controller;
import org.apache.drill.exec.rpc.control.WorkEventBus;
import org.apache.drill.exec.rpc.data.DataConnectionCreator;
import org.apache.drill.exec.server.BootStrapContext;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.server.rest.auth.DrillUserPrincipal;
import org.apache.drill.exec.store.sys.PersistentStoreProvider;
import org.apache.drill.exec.work.batch.ControlMessageHandler;
import org.apache.drill.exec.work.filter.RuntimeFilterWritable;
import org.apache.drill.exec.work.foreman.Foreman;
import org.apache.drill.exec.work.fragment.FragmentExecutor;
import org.apache.drill.exec.work.fragment.FragmentManager;
import org.apache.drill.exec.work.user.UserWorker;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Manages the running fragments in a Drillbit. Periodically requests run-time stats updates from fragments
 * running elsewhere.
 */
public class WorkManager implements AutoCloseable {
  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkManager.class);

  // Upper bound (milliseconds) that waitToExit(true) spends waiting for work to drain.
  private static final int EXIT_TIMEOUT_MS = 5000;

  /*
   * We use a {@see java.util.concurrent.ConcurrentHashMap} because it promises never to throw a
   * {@see java.util.ConcurrentModificationException}; we need that because the statusThread may
   * iterate over the map while other threads add FragmentExecutors via the {@see #WorkerBee}.
   */
  private final ConcurrentMap<FragmentHandle, FragmentExecutor> runningFragments = Maps.newConcurrentMap();

  // Forémen for queries currently owned by this Drillbit, keyed by query id.
  private final ConcurrentMap<QueryId, Foreman> queries = Maps.newConcurrentMap();

  private final BootStrapContext bContext;
  // Assigned in start(); null until then, see close() and getContext().
  private DrillbitContext dContext;

  private final ControlMessageHandler controlMessageWorker;
  private final UserWorker userWorker;
  private final WorkerBee bee;
  private final WorkEventBus workBus;
  private final Executor executor;
  private final StatusThread statusThread;

  // Guards isEmptyCondition; used by the shutdown handshake between waitToExit()
  // and indicateIfSafeToExit().
  private final Lock isEmptyLock = new ReentrantLock();
  // NOTE(review): (re)assigned on every waitToExit() call while holding isEmptyLock;
  // a non-null value signals that someone is waiting for work to drain.
  private Condition isEmptyCondition;

  /**
   * How often the StatusThread collects statistics about running fragments.
   */
  private final static int STATUS_PERIOD_SECONDS = 5;

  /**
   * Creates the WorkManager and its collaborators. The DrillbitContext is not available
   * yet; it is supplied later via {@link #start}.
   *
   * @param context bootstrap context providing the shared executor
   */
  public WorkManager(final BootStrapContext context) {
    this.bContext = context;
    bee = new WorkerBee(); // TODO should this just be an interface?
    workBus = new WorkEventBus(); // TODO should this just be an interface?
    executor = context.getExecutor();

    // TODO references to this escape here (via WorkerBee) before construction is done
    controlMessageWorker = new ControlMessageHandler(bee); // TODO getFragmentRunner(), getForemanForQueryId()
    userWorker = new UserWorker(bee); // TODO should just be an interface? addNewForeman(), getForemanForQueryId()
    statusThread = new StatusThread();
  }

  /**
   * Completes initialization once cluster services exist: builds the DrillbitContext,
   * starts the status-collection thread, and registers the running-fragment gauge.
   */
  public void start(
      final DrillbitEndpoint endpoint,
      final Controller controller,
      final DataConnectionCreator data,
      final ClusterCoordinator coord,
      final PersistentStoreProvider provider,
      final PersistentStoreProvider profilesProvider) {
    dContext = new DrillbitContext(endpoint, bContext, coord, controller, data, workBus, provider, profilesProvider);
    statusThread.start();

    // Expose the number of running fragments as a metrics gauge.
    DrillMetrics.register("drill.fragments.running", (Gauge<Integer>) runningFragments::size);
  }

  /** @return the shared executor used for forémen, fragments and ad-hoc work */
  public Executor getExecutor() {
    return executor;
  }

  /** @return the event bus used to route fragment status updates and managers */
  public WorkEventBus getWorkBus() {
    return workBus;
  }

  /** @return the handler for incoming control-channel messages */
  public ControlMessageHandler getControlMessageHandler() {
    return controlMessageWorker;
  }

  /** @return the worker servicing user (client) requests */
  public UserWorker getUserWorker() {
    return userWorker;
  }

  /** @return the narrowed interface handed to managed tasks */
  public WorkerBee getBee() {
    return bee;
  }

  /**
   * Stops the status thread and closes the DrillbitContext. Logs (but does not wait for)
   * any fragments still running at close time.
   */
  @Override
  public void close() throws Exception {
    statusThread.interrupt();

    final long numRunningFragments = runningFragments.size();
    if (numRunningFragments != 0) {
      logger.warn("Closing WorkManager but there are {} running fragments.", numRunningFragments);
      if (logger.isDebugEnabled()) {
        for (final FragmentHandle handle : runningFragments.keySet()) {
          logger.debug("Fragment still running: {} status: {}",
              QueryIdHelper.getQueryIdentifier(handle), runningFragments.get(handle).getStatus());
        }
      }
    }

    if (getContext() != null) {
      getContext().close();
    }
  }

  /** @return the DrillbitContext created in start(), or null before start() has run */
  public DrillbitContext getContext() {
    return dContext;
  }

  /**
   * Blocks until all queries and fragments have drained. With {@code forcefulShutdown}
   * the wait is bounded by EXIT_TIMEOUT_MS and leftover work is logged; otherwise the
   * wait is unbounded. Wakeups come from indicateIfSafeToExit() signalling
   * isEmptyCondition.
   *
   * @param forcefulShutdown whether to give up after EXIT_TIMEOUT_MS
   */
  public void waitToExit(final boolean forcefulShutdown) {
    isEmptyLock.lock();
    // Creating the condition marks that a waiter now exists (see indicateIfSafeToExit()).
    isEmptyCondition = isEmptyLock.newCondition();
    try {
      if (forcefulShutdown) {
        final long startTime = System.currentTimeMillis();
        final long endTime = startTime + EXIT_TIMEOUT_MS;
        long currentTime;
        while (!areQueriesAndFragmentsEmpty() && (currentTime = System.currentTimeMillis()) < endTime) {
          try {
            // await() returning false means the timeout elapsed — stop waiting.
            if (!isEmptyCondition.await(endTime - currentTime, TimeUnit.MILLISECONDS)) {
              break;
            }
          } catch (InterruptedException e) {
            // NOTE(review): the interrupt is swallowed (no re-interrupt) and the loop
            // continues until the deadline — presumably deliberate so shutdown still
            // honors the timeout; confirm before changing.
            logger.error("Interrupted while waiting to exit");
          }
        }
        if (!areQueriesAndFragmentsEmpty()) {
          logger.warn("Timed out after {} millis. Shutting down before all fragments and foremen " +
              "have completed.", EXIT_TIMEOUT_MS);
          for (QueryId queryId: queries.keySet()) {
            logger.warn("Query {} is still running.", QueryIdHelper.getQueryId(queryId));
          }
          for (FragmentHandle fragmentHandle: runningFragments.keySet()) {
            logger.warn("Fragment {} is still running.", QueryIdHelper.getQueryIdentifier(fragmentHandle));
          }
        }
      } else {
        // Graceful path: wait indefinitely, ignoring interrupts.
        while (!areQueriesAndFragmentsEmpty()) {
          isEmptyCondition.awaitUninterruptibly();
        }
      }
    } finally {
      isEmptyLock.unlock();
    }
  }

  // True when no foreman and no fragment is registered on this Drillbit.
  private boolean areQueriesAndFragmentsEmpty() {
    return queries.isEmpty() && runningFragments.isEmpty();
  }

  /**
   * A thread calling the {@link #waitToExit(boolean)} method is notified when a foreman is retired.
   */
  private void indicateIfSafeToExit() {
    isEmptyLock.lock();
    try {
      // isEmptyCondition is only non-null while waitToExit() has a waiter registered.
      if (isEmptyCondition != null) {
        logger.info("Waiting for {} running queries before shutting down.", queries.size());
        logger.info("Waiting for {} running fragments before shutting down.", runningFragments.size());

        if (areQueriesAndFragmentsEmpty()) {
          isEmptyCondition.signal();
        }
      }
    } finally {
      isEmptyLock.unlock();
    }
  }

  /**
   * Get the number of queries that are running on a drillbit.
   * Primarily used to monitor the number of running queries after a
   * shutdown request is triggered.
   */
  public synchronized Map<String, Integer> getRemainingQueries() {
    Map<String, Integer> queriesInfo = new HashMap<>();
    queriesInfo.put("queriesCount", queries.size());
    queriesInfo.put("fragmentsCount", runningFragments.size());
    return queriesInfo;
  }

  /**
   * Narrowed interface to WorkManager that is made available to tasks it is managing.
   */
  public class WorkerBee {
    /**
     * Registers a new Foreman and schedules it on the shared executor.
     * The Foreman is expected to remove itself via retireForeman().
     */
    public void addNewForeman(final Foreman foreman) {
      queries.put(foreman.getQueryId(), foreman);

      // We're relying on the Foreman to clean itself up with retireForeman().
      executor.execute(foreman);
    }

    /**
     * Add a self contained runnable work to executor service.
     *
     * @param runnable runnable to execute
     */
    public void addNewWork(final Runnable runnable) {
      executor.execute(runnable);
    }

    /**
     * Cancels the query's Foreman asynchronously, after an authorization check.
     *
     * @param queryId the query to cancel
     * @param principal the requesting user, or null to skip the permission check
     * @return false when no Foreman is registered for the query id
     */
    public boolean cancelForeman(final QueryId queryId, DrillUserPrincipal principal) {
      Preconditions.checkNotNull(queryId);

      final Foreman foreman = queries.get(queryId);
      if (foreman == null) {
        return false;
      }

      final String queryIdString = QueryIdHelper.getQueryId(queryId);
      if (principal != null && !principal.canManageQueryOf(foreman.getQueryContext().getQueryUserName())) {
        throw UserException.permissionError()
            .message("Not authorized to cancel the query '%s'", queryIdString)
            .build(logger);
      }

      // Cancellation runs on the executor; the worker thread is renamed for log traceability
      // and restored afterwards.
      executor.execute(() -> {
        final Thread currentThread = Thread.currentThread();
        final String originalName = currentThread.getName();
        try {
          currentThread.setName(queryIdString + ":foreman:cancel");
          logger.debug("Canceling foreman");
          foreman.cancel();
        } catch (Throwable t) {
          logger.warn("Exception while canceling foreman", t);
        } finally {
          currentThread.setName(originalName);
        }
      });
      return true;
    }

    /**
     * Remove the given Foreman from the running query list.
     *
     * <p>The running query list is a bit of a misnomer, because it doesn't
     * necessarily mean that {@link org.apache.drill.exec.work.foreman.Foreman#run()}
     * is executing. That only lasts for the duration of query setup, after which
     * the Foreman instance survives as a state machine that reacts to events
     * from the local root fragment as well as RPC responses from remote Drillbits.</p>
     *
     * @param foreman the Foreman to retire
     */
    public void retireForeman(final Foreman foreman) {
      Preconditions.checkNotNull(foreman);

      final QueryId queryId = foreman.getQueryId();
      // remove(key, value) only removes when this exact Foreman is still mapped.
      final boolean wasRemoved = queries.remove(queryId, foreman);
      if (!wasRemoved) {
        logger.warn("Couldn't find retiring Foreman for query " + queryId);
      }

      indicateIfSafeToExit();
    }

    /** @return the Foreman for the query, or null if not registered here */
    public Foreman getForemanForQueryId(final QueryId queryId) {
      return queries.get(queryId);
    }

    /** @return the DrillbitContext, or null before WorkManager.start() */
    public DrillbitContext getContext() {
      return dContext;
    }

    /**
     * Currently used to start a root fragment that is not blocked on data, and leaf fragments.
     * @param fragmentExecutor the executor to run
     */
    public void addFragmentRunner(final FragmentExecutor fragmentExecutor) {
      final FragmentHandle fragmentHandle = fragmentExecutor.getContext().getHandle();
      runningFragments.put(fragmentHandle, fragmentExecutor);
      // SelfCleaningRunnable guarantees cleanup() runs after the fragment finishes.
      executor.execute(new SelfCleaningRunnable(fragmentExecutor) {
        @Override
        protected void cleanup() {
          runningFragments.remove(fragmentHandle);
          indicateIfSafeToExit();
        }
      });
    }

    /**
     * Currently used to start a root fragment that is blocked on data, and intermediate fragments. This method is
     * called, when the first batch arrives.
     *
     * @param fragmentManager the manager for the fragment
     */
    public void startFragmentPendingRemote(final FragmentManager fragmentManager) {
      final FragmentHandle fragmentHandle = fragmentManager.getHandle();
      final FragmentExecutor fragmentExecutor = fragmentManager.getRunnable();
      if (fragmentExecutor == null) {
        // the fragment was most likely cancelled
        return;
      }
      runningFragments.put(fragmentHandle, fragmentExecutor);
      executor.execute(new SelfCleaningRunnable(fragmentExecutor) {
        @Override
        protected void cleanup() {
          runningFragments.remove(fragmentHandle);
          if (!fragmentManager.isCancelled()) {
            workBus.removeFragmentManager(fragmentHandle, false);
          }
          indicateIfSafeToExit();
        }
      });
    }

    /** @return the running FragmentExecutor for the handle, or null if none */
    public FragmentExecutor getFragmentRunner(final FragmentHandle handle) {
      return runningFragments.get(handle);
    }

    /**
     * receive the RuntimeFilter thorough the wire
     * @param runtimeFilter runtime filter
     */
    public void receiveRuntimeFilter(final RuntimeFilterWritable runtimeFilter) {
      BitData.RuntimeFilterBDef runtimeFilterDef = runtimeFilter.getRuntimeFilterBDef();
      boolean toForeman = runtimeFilterDef.getToForeman();
      QueryId queryId = runtimeFilterDef.getQueryId();
      String queryIdStr = QueryIdHelper.getQueryId(queryId);
      // Take a reference before handing the filter off to an async consumer.
      runtimeFilter.retainBuffers(1);
      //to foreman
      if (toForeman) {
        Foreman foreman = queries.get(queryId);
        if (foreman != null) {
          executor.execute(() -> {
            final Thread currentThread = Thread.currentThread();
            final String originalName = currentThread.getName();
            currentThread.setName(queryIdStr + ":foreman:routeRuntimeFilter");
            try {
              foreman.getRuntimeFilterRouter().register(runtimeFilter);
            } catch (Exception e) {
              logger.warn("Exception while registering the RuntimeFilter", e);
            } finally {
              currentThread.setName(originalName);
              runtimeFilter.close();
            }
          });
        }
      } else {
        //to the probe side scan node
        int majorId = runtimeFilterDef.getMajorFragmentId();
        int minorId = runtimeFilterDef.getMinorFragmentId();
        ExecProtos.FragmentHandle fragmentHandle = ExecProtos.FragmentHandle.newBuilder()
            .setMajorFragmentId(majorId)
            .setMinorFragmentId(minorId)
            .setQueryId(queryId).build();
        FragmentExecutor fragmentExecutor = runningFragments.get(fragmentHandle);
        if (fragmentExecutor != null) {
          fragmentExecutor.getContext().addRuntimeFilter(runtimeFilter);
        }
      }
    }
  }

  /**
   * Periodically gather current statistics. {@link org.apache.drill.exec.work.foreman.QueryManager} uses a FragmentStatusListener to
   * maintain changes to state, and should be current. However, we want to collect current statistics
   * about RUNNING queries, such as current memory consumption, number of rows processed, and so on.
   * The FragmentStatusListener only tracks changes to state, so the statistics kept there will be
   * stale; this thread probes for current values.
   *
   * For each running fragment if the Foreman is the local Drillbit then status is updated locally bypassing the Control
   * Tunnel, whereas for remote Foreman it is sent over the Control Tunnel.
   */
  private class StatusThread extends Thread {
    StatusThread() {
      // assume this thread is created by a non-daemon thread
      setName("WorkManager.StatusThread");
    }

    @Override
    public void run() {
      // Get the controller and localBitEndPoint outside the loop since these will not change once a Drillbit and
      // StatusThread is started
      final Controller controller = dContext.getController();
      final DrillbitEndpoint localBitEndPoint = dContext.getEndpoint();

      while (true) {
        final List<DrillRpcFuture<Ack>> futures = Lists.newArrayList();
        for (final FragmentExecutor fragmentExecutor : runningFragments.values()) {
          final FragmentStatus status = fragmentExecutor.getStatus();
          if (status == null) {
            // Fragment has no reportable status yet; skip it this round.
            continue;
          }

          final DrillbitEndpoint foremanEndpoint = fragmentExecutor.getContext().getForemanEndpoint();
          // If local endpoint is the Foreman for this running fragment, then submit the status locally bypassing the
          // Control Tunnel
          if (localBitEndPoint.equals(foremanEndpoint)) {
            workBus.statusUpdate(status);
          } else {
            // else send the status to remote Foreman over Control Tunnel
            futures.add(controller.getTunnel(foremanEndpoint).sendFragmentStatus(status));
          }
        }

        // Wait for the status messages to be sent; failures are logged but non-fatal.
        for (final DrillRpcFuture<Ack> future : futures) {
          try {
            future.checkedGet();
          } catch (final RpcException ex) {
            logger.info("Failure while sending intermediate fragment status to Foreman", ex);
          }
        }

        try {
          Thread.sleep(STATUS_PERIOD_SECONDS * 1000);
        } catch (final InterruptedException e) {
          // Preserve evidence that the interruption occurred so that code higher up on the call stack can learn of the
          // interruption and respond to it if it wants to.
          Thread.currentThread().interrupt();
          // exit status thread on interrupt.
          break;
        }
      }
    }
  }
}
package at.ac.tuwien.dsg.salam.cloud.generator;

import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.math3.distribution.UniformRealDistribution;
import org.apache.commons.math3.random.MersenneTwister;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import at.ac.tuwien.dsg.salam.common.interfaces.MetricMonitorInterface;
import at.ac.tuwien.dsg.salam.common.model.ComputingElement;
import at.ac.tuwien.dsg.salam.common.model.Functionality;
import at.ac.tuwien.dsg.salam.common.model.HumanComputingElement;
import at.ac.tuwien.dsg.salam.common.model.Service;
import at.ac.tuwien.dsg.salam.util.ConfigJson;
import at.ac.tuwien.dsg.salam.util.Util;

/**
 * Generates a randomized population of {@link HumanComputingElement}s, their pairwise
 * connections, common properties, and the {@link Service}s they host, driven entirely by a
 * JSON configuration. All randomness is drawn from seeded distributions so a given
 * {@code seed} reproduces the same population.
 */
public class ServiceGenerator {

    private JSONObject configRoot = null;
    // Monotonically incremented; every distribution consumes one seed value, so the order
    // of distribution creation is part of the reproducibility contract.
    private int seed;

    // distribution generators
    private UniformRealDistribution distPropToHave;
    private Object distPropValue;
    private UniformRealDistribution distConnToHave;
    private Object distConnWeight;
    private UniformRealDistribution distSvcToHave;
    private UniformRealDistribution[] distSvcPropToHaves;
    private Object[] distSvcPropValues;

    public ServiceGenerator(ConfigJson config) {
        this.configRoot = config.getRoot();
    }

    /**
     * Generates elements, common properties, connections and services as configured.
     *
     * @return the generated services (also printed to stdout)
     * @throws JSONException if a required configuration key is missing or mistyped
     */
    public ArrayList<Service> generate() throws InstantiationException,
            IllegalAccessException, IllegalArgumentException, InvocationTargetException,
            SecurityException, ClassNotFoundException, JSONException {

        // init
        ArrayList<Service> services = new ArrayList<Service>();
        ArrayList<ComputingElement> elements = new ArrayList<ComputingElement>();

        // config
        seed = configRoot.getInt("seed");
        int nElements = configRoot.getInt("numberOfElements");
        JSONObject connCfg = configRoot.getJSONObject("connection");
        JSONArray svcCfg = configRoot.getJSONArray("services");
        JSONArray propCfg = configRoot.getJSONArray("commonProperties");

        // generate elements (ids start at 1)
        Util.log().info("Generating " + nElements + " elements");
        for (long i = 1; i <= nElements; i++) {
            elements.add(new HumanComputingElement(i));
        }

        // generate common properties
        for (int i = 0; i < propCfg.length(); i++) {
            // get config
            JSONObject prop = propCfg.getJSONObject(i);
            String type = prop.getString("type");
            String name = prop.getString("name");
            double pToHave = prop.getDouble("probabilityToHave");
            JSONObject mapping = null;

            // init distributions (lazily, so the seed sequence matches the original)
            if (distPropToHave == null) {
                distPropToHave = new UniformRealDistribution(new MersenneTwister(seed++), 0, 1);
            }
            if (type.equals("static") || type.equals("skill")) {
                JSONObject valueCfg = prop.getJSONObject("value");
                String clazz = GeneratorUtil.getFullClassName(valueCfg.getString("class"));
                JSONArray params = valueCfg.getJSONArray("params");
                if (valueCfg.has("mapping")) {
                    mapping = valueCfg.getJSONObject("mapping");
                }
                // TODO: distPropValue should be reusable, because we may generate streams of services
                distPropValue = GeneratorUtil.createValueDistribution(clazz, params, seed++);
            }

            for (ComputingElement e : elements) {
                HumanComputingElement element = (HumanComputingElement) e;
                if (GeneratorUtil.shouldHave(distPropToHave, pToHave)) {
                    if (type.equals("static") || type.equals("skill")) {
                        GeneratorUtil.generateProperty(element, name, type, distPropValue, mapping);
                    } else if (type.equals("metric")) {
                        // Metric properties attach a monitor interface instead of a value.
                        String ifaceClazz = prop.getString("interfaceClass");
                        MetricMonitorInterface metric = GeneratorUtil.createMetricObject(ifaceClazz);
                        element.getMetrics().setInterface(name, metric);
                    }
                }
            }
        }

        // generate connections
        double pToConnect = connCfg.getDouble("probabilityToConnect");
        JSONObject weightCfg = connCfg.getJSONObject("weight");
        String weightClazz = GeneratorUtil.getFullClassName(weightCfg.getString("class"));
        JSONArray weightParams = weightCfg.getJSONArray("params");
        if (distConnToHave == null) {
            distConnToHave = new UniformRealDistribution(new MersenneTwister(seed++), 0, 1);
        }
        if (distConnWeight == null) {
            distConnWeight = GeneratorUtil.createValueDistribution(weightClazz, weightParams, seed++);
        }

        for (int i = 0; i < elements.size(); i++) {
            ComputingElement e = elements.get(i);
            // j starts at i + 1, so each unordered pair is visited exactly once and a
            // self-connection is impossible (the former "i != j" guard was redundant).
            for (int j = i + 1; j < elements.size(); j++) {
                if (GeneratorUtil.shouldHave(distConnToHave, pToConnect)) {
                    double weight = (double) GeneratorUtil.sample(distConnWeight);
                    e.setConnection(elements.get(j), weight);
                    elements.get(j).setConnection(e, weight);
                }
            }
        }

        if (configRoot.has("singleElementSingleServices")
                && configRoot.getBoolean("singleElementSingleServices")) {
            services = generateSingleServices(svcCfg, elements);
        } else {
            services = generateMultipleServices(svcCfg, elements);
        }

        System.out.println("=== Generated services ===");
        printServiceList(services);
        System.out.println("==========================");

        return services;
    }

    /** Per-service property configuration parsed from JSON (parallel arrays, index = property). */
    private static class SvcPropConfig {
        String[] types;
        String[] names;
        double[] pToHaves;
        JSONObject[] mappings;
    }

    /**
     * Parses the "properties" array of one service config and (re)creates the matching
     * value/probability distributions in {@link #distSvcPropValues} and
     * {@link #distSvcPropToHaves}. Extracted because this block was duplicated verbatim in
     * {@code generateMultipleServices} and {@code generateSingleServices}.
     *
     * <p>Seed order is preserved exactly: per property, the value distribution consumes a
     * seed first, then the probability-to-have distribution.</p>
     */
    private SvcPropConfig initSvcPropDistributions(JSONArray prop) throws InstantiationException,
            IllegalAccessException, IllegalArgumentException, InvocationTargetException,
            SecurityException, ClassNotFoundException, JSONException {
        int propLength = prop.length();
        SvcPropConfig cfg = new SvcPropConfig();
        cfg.types = new String[propLength];
        cfg.names = new String[propLength];
        cfg.pToHaves = new double[propLength];
        cfg.mappings = new JSONObject[propLength];
        distSvcPropValues = new Object[propLength];
        distSvcPropToHaves = new UniformRealDistribution[propLength];
        for (int j = 0; j < propLength; j++) {
            JSONObject curProp = prop.getJSONObject(j);
            cfg.types[j] = curProp.getString("type");
            cfg.names[j] = curProp.getString("name");
            cfg.pToHaves[j] = curProp.getDouble("probabilityToHave");
            JSONObject valueCfg = curProp.getJSONObject("value");
            JSONArray params = valueCfg.getJSONArray("params");
            String clazz = GeneratorUtil.getFullClassName(valueCfg.getString("class"));
            cfg.mappings[j] = valueCfg.has("mapping") ? valueCfg.getJSONObject("mapping") : null;
            distSvcPropValues[j] = GeneratorUtil.createValueDistribution(clazz, params, seed++);
            distSvcPropToHaves[j] = new UniformRealDistribution(new MersenneTwister(seed++), 0, 1);
        }
        return cfg;
    }

    /** Probabilistically attaches the configured service properties to one element. */
    private void applySvcProperties(HumanComputingElement element, SvcPropConfig cfg)
            throws InstantiationException, IllegalAccessException, IllegalArgumentException,
            InvocationTargetException, SecurityException, ClassNotFoundException, JSONException {
        for (int j = 0; j < cfg.names.length; j++) {
            if (GeneratorUtil.shouldHave(distSvcPropToHaves[j], cfg.pToHaves[j])) {
                GeneratorUtil.generateProperty(element, cfg.names[j], cfg.types[j],
                        distSvcPropValues[j], cfg.mappings[j]);
            }
        }
    }

    /**
     * For every configured service type, each element independently hosts the service with the
     * configured probability; one element may host several services.
     */
    private ArrayList<Service> generateMultipleServices(JSONArray svcCfg,
            ArrayList<ComputingElement> elements) throws InstantiationException,
            IllegalAccessException, IllegalArgumentException, InvocationTargetException,
            SecurityException, ClassNotFoundException, JSONException {
        ArrayList<Service> services = new ArrayList<Service>();

        // generate services
        for (int i = 0; i < svcCfg.length(); i++) {
            // get config
            JSONObject svc = svcCfg.getJSONObject(i);
            String func = svc.getString("functionality");
            double pSvcToHave = svc.getDouble("probabilityToHave");
            JSONArray prop = svc.getJSONArray("properties");

            // init config and distributions
            distSvcToHave = new UniformRealDistribution(new MersenneTwister(seed++), 0, 1);
            SvcPropConfig cfg = initSvcPropDistributions(prop);

            for (ComputingElement e : elements) {
                if (GeneratorUtil.shouldHave(distSvcToHave, pSvcToHave)) {
                    HumanComputingElement element = (HumanComputingElement) e;

                    // add service
                    Service service = new Service(new Functionality(func), element);
                    element.addService(service);
                    services.add(service);

                    // add related properties to the element
                    applySvcProperties(element, cfg);
                }
            }
        }

        return services;
    }

    /**
     * Each element hosts exactly one service, chosen among the configured service types with
     * weights given by their {@code probabilityToHave} values.
     */
    private ArrayList<Service> generateSingleServices(JSONArray svcCfg,
            ArrayList<ComputingElement> elements) throws InstantiationException,
            IllegalAccessException, IllegalArgumentException, InvocationTargetException,
            SecurityException, ClassNotFoundException, JSONException {
        ArrayList<Service> services = new ArrayList<Service>();

        // get probability to have for all services
        double[] probabilityToHave = new double[svcCfg.length()];
        for (int i = 0; i < svcCfg.length(); i++) {
            JSONObject svc = svcCfg.getJSONObject(i);
            probabilityToHave[i] = svc.getDouble("probabilityToHave");
        }

        // generate services
        distSvcToHave = new UniformRealDistribution(new MersenneTwister(seed++), 0, 1);
        for (ComputingElement e : elements) {
            int serviceId = GeneratorUtil.selectOne(distSvcToHave, probabilityToHave);

            // get config for the selected service
            JSONObject svc = svcCfg.getJSONObject(serviceId);
            String func = svc.getString("functionality");
            JSONArray prop = svc.getJSONArray("properties");

            // init config and distributions
            SvcPropConfig cfg = initSvcPropDistributions(prop);

            // add service
            HumanComputingElement element = (HumanComputingElement) e;
            Service service = new Service(new Functionality(func), element);
            element.addService(service);
            services.add(service);

            // add related properties to the element
            applySvcProperties(element, cfg);
        }

        return services;
    }

    /** Prints one service per line to stdout. */
    public void printServiceList(List<Service> services) {
        for (Service s : services) {
            System.out.println(s);
        }
    }
}
/* See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * Esri Inc. licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.esri.gpt.framework.security.identity.local; import com.esri.gpt.framework.security.credentials.ChangePasswordCriteria; import com.esri.gpt.framework.security.credentials.CredentialPolicyException; import com.esri.gpt.framework.security.credentials.Credentials; import com.esri.gpt.framework.security.credentials.CredentialsDeniedException; import com.esri.gpt.framework.security.credentials.DistinguishedNameCredential; import com.esri.gpt.framework.security.credentials.RecoverPasswordCriteria; import com.esri.gpt.framework.security.credentials.UsernameCredential; import com.esri.gpt.framework.security.credentials.UsernamePasswordCredentials; import com.esri.gpt.framework.security.identity.IdentityAdapter; import com.esri.gpt.framework.security.identity.IdentityException; import com.esri.gpt.framework.security.principal.Group; import com.esri.gpt.framework.security.principal.Role; import com.esri.gpt.framework.security.principal.RoleSet; import com.esri.gpt.framework.security.principal.Roles; import com.esri.gpt.framework.security.principal.User; import com.esri.gpt.framework.security.principal.Users; import java.sql.SQLException; import javax.naming.NamingException; /** * Identity adapter for a simple installation of one known user (the administrator). 
 */
public class SimpleIdentityAdapter extends IdentityAdapter {

// class variables =============================================================

// instance variables ==========================================================

// constructors ================================================================

/** Default constructor. */
public SimpleIdentityAdapter() {
  super();
}

// properties ==================================================================

/**
 * Gets the credentials (username/password/DN) for the one known user,
 * as configured for the service account.
 * @return the service account credentials
 */
private UsernamePasswordCredentials getCredentials() {
  return getApplicationConfiguration().getIdentityConfiguration().getSimpleConfiguration().getServiceAccountCredentials();
}

/**
 * Gets the distinguished name for the one known user.
 * @return the distinguished name
 */
private String getDN() {
  return getCredentials().getDistinguishedName();
}

/**
 * Gets the password for the one known user.
 * @return the password
 */
private String getPassword() {
  return getCredentials().getPassword();
}

/**
 * Gets the username for the one known user.
 * @return the username
 */
private String getUsername() {
  return getCredentials().getUsername();
}

// methods =====================================================================

/**
 * Adds user to role.
 * <p>Intentionally a no-op in this adapter: the single known user's roles come
 * from the configured roles (see authenticate), so there is nothing to persist.
 * @param user the subject user
 * @param role the subject role
 * @throws CredentialPolicyException if the credentials are invalid
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void addUserToRole(User user, String role)
  throws CredentialPolicyException, IdentityException, NamingException, SQLException {
}

/**
 * Authenticates a user.
 * <p>Accepts username/password, distinguished-name or username-only
 * credentials, and compares them against the single configured service
 * account. On success the user object is populated with the account's
 * DN/name, granted every configured role, and given a local reference row.
 * <p>NOTE(review): the password comparison uses String.equals, which is not
 * constant-time; acceptable only if timing attacks are out of scope here.
 * @param user the subject user
 * @throws CredentialsDeniedException if credentials are denied
 * @throws IdentityException if a system error occurs preventing authentication
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void authenticate(User user)
  throws CredentialsDeniedException, IdentityException, SQLException {
  boolean bAuthenticated = false;
  user.getAuthenticationStatus().reset();

  // authenticate: each credential type short-circuits on empty configured values,
  // so a blank configuration can never authenticate anyone
  Credentials credentials = user.getCredentials();
  if (credentials != null) {
    if (credentials instanceof UsernamePasswordCredentials) {
      UsernamePasswordCredentials upCreds = (UsernamePasswordCredentials)credentials;
      bAuthenticated = (getUsername().length() > 0) && (getPassword().length() > 0) &&
                       upCreds.getUsername().equalsIgnoreCase(getUsername()) &&
                       upCreds.getPassword().equals(getPassword());
    } else if (credentials instanceof DistinguishedNameCredential) {
      DistinguishedNameCredential dnCred = (DistinguishedNameCredential)credentials;
      bAuthenticated = (getDN().length() > 0) &&
                       dnCred.getDistinguishedName().equalsIgnoreCase(getDN());
    } else if (credentials instanceof UsernameCredential) {
      // Username-only: no password check. Presumably reserved for trusted
      // in-process callers — confirm before exposing this path externally.
      UsernameCredential unCred = (UsernameCredential)credentials;
      bAuthenticated = (getUsername().length() > 0) &&
                       unCred.getUsername().equalsIgnoreCase(getUsername());
    }
  }

  // setup the authenticated user
  if (bAuthenticated) {
    user.setDistinguishedName(getDN());
    user.setKey(user.getDistinguishedName());
    user.setName(getUsername());
    user.getProfile().setUsername(user.getName());
    user.getAuthenticationStatus().setWasAuthenticated(true);

    // set role/group properties: the single user receives every configured role
    try {
      readUserGroups(user);
      RoleSet authRoles = user.getAuthenticationStatus().getAuthenticatedRoles();
      Roles cfgRoles = getApplicationConfiguration().getIdentityConfiguration().getConfiguredRoles();
      for (Role role: cfgRoles.values()) {
        authRoles.addAll(role.getFullRoleSet());
      }
    } catch (NamingException e) {
      // will never be thrown (readUserGroups here never touches LDAP)
    }

    // ensure a local reference for the user
    LocalDao localDao = new LocalDao(getRequestContext());
    localDao.ensureReferenceToRemoteUser(user);
  }

  if (!bAuthenticated) {
    throw new CredentialsDeniedException("Invalid credentials.");
  }
}

/**
 * Changes the password for a user.
 * <p>Not supported by this adapter; always throws via throwNotSupportedException.
 * @param user the subject user
 * @param criteria the criteria associated with the password change
 * @throws CredentialsDeniedException if the credentials are denied
 * @throws CredentialPolicyException if the credentials are invalid
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void changePassword(User user, ChangePasswordCriteria criteria)
  throws CredentialsDeniedException, CredentialPolicyException,
         IdentityException, NamingException, SQLException {
  throwNotSupportedException("changePassword");
}

/**
 * Reads the members of a group.
 * <p>Group membership is fixed: every group contains exactly the one known user.
 * @param groupDN the distinguished name for the group (ignored)
 * @return the collection of users belonging to the group
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public Users readGroupMembers(String groupDN)
  throws IdentityException, NamingException, SQLException {
  Users users = new Users();
  User user = new User();
  user.setDistinguishedName(getDN());
  user.setKey(user.getDistinguishedName());
  user.setName(getUsername());
  users.add(user);
  return users;
}

/**
 * Reads the groups to which a user belongs.
 * <p>If the user is the one known user, a group is fabricated per configured
 * role (role key used as DN/key/name); otherwise the user is left untouched.
 * @param user the subject user
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void readUserGroups(User user)
  throws IdentityException, NamingException, SQLException {
  if ((user != null) && (user.getDistinguishedName().equalsIgnoreCase(getDN()))) {
    Roles cfgRoles = getApplicationConfiguration().getIdentityConfiguration().getConfiguredRoles();
    for (Role role: cfgRoles.values()) {
      Group group = new Group();
      group.setDistinguishedName(role.getKey());
      group.setKey(role.getKey());
      group.setName(role.getKey());
      user.getGroups().add(group);
    }
  }
}

/**
 * Reads the profile attributes for a user.
 * <p>Only the username attribute is populated (copied from the user's name).
 * @param user the subject user
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void readUserProfile(User user)
  throws IdentityException, NamingException, SQLException {
  if (user != null) {
    user.getProfile().setUsername(user.getName());
  }
}

/**
 * Recovers a user password.
 * <p>Not supported by this adapter; always throws via throwNotSupportedException.
 * @param criteria the criteria associated with the password recovery
 * @return the user associated with the recovered credentials (null if no match)
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public User recoverPassword(RecoverPasswordCriteria criteria)
  throws IdentityException, NamingException, SQLException {
  throwNotSupportedException("recoverPassword");
  return null;
}

/**
 * Registers a new user.
 * <p>Not supported by this adapter; always throws via throwNotSupportedException.
 * @param user the subject user
 * @throws CredentialPolicyException if the credentials are invalid
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void registerUser(User user)
  throws CredentialPolicyException, IdentityException, NamingException, SQLException {
  throwNotSupportedException("registerUser");
}

/**
 * Updates the profile attributes for a user.
 * <p>Not supported by this adapter; always throws via throwNotSupportedException.
 * @param user the subject user
 * @throws CredentialPolicyException if the credentials are invalid
 * @throws IdentityException if a system error occurs preventing the action
 * @throws NamingException if an LDAP naming exception occurs
 * @throws SQLException if a database communication exception occurs
 */
@Override
public void updateUserProfile(User user)
  throws CredentialPolicyException, IdentityException, NamingException, SQLException {
  throwNotSupportedException("updateUserProfile");
}

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.raid;

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.RecoverTreeNode;
import org.json.*;

/**
 * Simple Regenerating Code (SRC): a Reed-Solomon code augmented with a few
 * "simple" XOR-like group parities so that single erasures can be repaired
 * locally from a small group instead of a full RS decode.
 *
 * <p>Layout of a stripe of length stripeSize + paritySize:
 * indices [0, paritySizeSRC) are the stored SRC parities, indices
 * [paritySizeSRC, paritySize) are the RS parities, and the rest is data.
 * One additional SRC group parity is implied (not stored).
 *
 * <p>This class is not thread-safe: encode/decode share the instance
 * scratch buffers {@code dataBuff} and {@code errSignature}. The
 * {@code *Parallel} variants avoid {@code errSignature} but still share
 * other state; use one instance per thread for concurrent decoding.
 */
public class SimpleRegeneratingCode extends ErasureCode {
	public static final Log LOG = LogFactory
			.getLog(SimpleRegeneratingCode.class);

	private int stripeSize;          // number of data symbols per stripe
	private int paritySize;          // total parities (SRC + RS)
	private int paritySizeSRC;       // stored simple (group) parities
	private int paritySizeRS;        // Reed-Solomon parities
	private int simpleParityDegree;  // locations combined into one SRC parity

	private int[] generatingPolynomial;
	private int PRIMITIVE_ROOT = 2;
	private int[] primitivePower;
	private GaloisField GF = GaloisField.getInstance();
	private int[] errSignature;      // scratch for the serial RS decode path
	private int[] dataBuff;          // scratch for the serial encode path
	private int[][] groupsTable;     // groupsTable[loc] = SRC-group neighbors of loc

	@Deprecated
	public SimpleRegeneratingCode(int stripeSize, int paritySize) {
		init(stripeSize, paritySize);
	}

	public SimpleRegeneratingCode() {
	}

	@Override
	public void init(Codec codec) {
		try {
			this.paritySizeSRC = codec.json.getInt("parity_length_src");
		} catch (JSONException e) {
			e.printStackTrace();
		}
		init(codec.stripeLength, codec.parityLength);
		LOG.info(" Initialized " + SimpleRegeneratingCode.class
				+ " stripeLength:" + codec.stripeLength + " parityLength:"
				+ codec.parityLength + " SRC parities:" + paritySizeSRC);
	}

	/**
	 * Initializes code parameters, shrinking paritySizeSRC (and growing
	 * paritySizeRS) until the SRC group layout is valid, then precomputes
	 * primitive powers, the RS generating polynomial and the group table.
	 */
	private void init(int stripeSize, int paritySize) {
		this.stripeSize = stripeSize;
		this.paritySize = paritySize;
		this.paritySizeRS = paritySize - paritySizeSRC;

		assert (stripeSize + paritySizeRS < GF.getFieldSize());
		assert (paritySize >= paritySizeSRC);

		// The degree of a simple parity is the number of locations
		// combined into the single parity. The degree is a function
		// of the RS-stripe (stripe + RS parity) length.
		// (The number of SRC groups is paritySizeSRC + 1, because
		// one SRC parity is implied -- not stored).
		simpleParityDegree = (int) Math
				.ceil((double) (stripeSize + paritySizeRS)
						/ (double) (paritySizeSRC + 1));
		while (simpleParityDegree * paritySizeSRC >= stripeSize + paritySizeRS) {
			LOG.info("\nInvalid code parameters."
					+ " Reducing SRC parities to " + (paritySizeSRC - 1)
					+ " Increasing RS parities to " + (paritySizeRS + 1));
			this.paritySizeSRC--;
			this.paritySizeRS++;
			simpleParityDegree = (int) Math
					.ceil((double) (stripeSize + paritySizeRS)
							/ (double) (paritySizeSRC + 1));
		}

		this.errSignature = new int[paritySizeRS];
		this.dataBuff = new int[paritySizeRS + stripeSize];

		this.primitivePower = new int[stripeSize + paritySizeRS];
		// compute powers of the primitive root
		for (int i = 0; i < stripeSize + paritySizeRS; i++) {
			primitivePower[i] = GF.power(PRIMITIVE_ROOT, i);
		}
		// compute generating polynomial
		int[] gen = { 1 };
		int[] poly = new int[2];
		for (int i = 0; i < paritySizeRS; i++) {
			poly[0] = primitivePower[i];
			poly[1] = 1;
			gen = GF.multiply(gen, poly);
		}
		// generating polynomial has all generating roots
		generatingPolynomial = gen;

		// groupsTable[loc]: the SRC group neighbors of location loc.
		groupsTable = new int[paritySize + stripeSize][];
		for (int i = 0; i < groupsTable.length; i++) {
			List<Integer> locationsInGroup = getSRCGroupNeighbors(i);
			groupsTable[i] = new int[locationsInGroup.size()];
			int k = 0;
			for (int loc : locationsInGroup)
				groupsTable[i][k++] = loc;
		}
	}

	/**
	 * Shared encode path: computes the RS parities into
	 * parity[paritySizeSRC..) and the SRC group parities into
	 * parity[0..paritySizeSRC), using {@code buf} as RS scratch space.
	 * Factored out of the two public encode() overloads, which previously
	 * duplicated this logic verbatim.
	 */
	private void encodeInternal(int[] message, int[] parity, int[] buf) {
		assert (message.length == stripeSize && parity.length == paritySize);

		// initialize data buffer (RS parity slots zeroed)
		for (int i = 0; i < paritySizeRS; i++) {
			buf[i] = 0;
		}
		// put message in the data buffer
		for (int i = 0; i < stripeSize; i++) {
			buf[i + paritySizeRS] = message[i];
		}
		// calculate RS parities and copy into parity[]
		GF.remainder(buf, generatingPolynomial);
		for (int i = 0; i < paritySizeRS; i++) {
			parity[i + paritySizeSRC] = buf[i];
		}
		// restore message in buf (remainder clobbered the low slots only,
		// but keep identical behavior to the original implementation)
		for (int i = 0; i < stripeSize; i++) {
			buf[i + paritySizeRS] = message[i];
		}
		// compute the SRC parities: each is the GF-sum of its group's slice
		for (int i = 0; i < paritySizeSRC; i++) {
			parity[i] = 0;
			for (int j = simpleParityDegree * i; j < simpleParityDegree * (i + 1); j++)
				parity[i] = GF.add(buf[j], parity[i]);
		}
	}

	/**
	 * Encodes a stripe using the instance scratch buffer (not thread-safe).
	 * @param message stripeSize data symbols
	 * @param parity output; paritySize symbols (SRC first, then RS)
	 */
	@Override
	public void encode(int[] message, int[] parity) {
		encodeInternal(message, parity, dataBuff);
	}

	/**
	 * Encodes a stripe using a caller-supplied scratch buffer of length
	 * paritySizeRS + stripeSize, allowing concurrent use across instances.
	 */
	@Override
	public void encode(int[] message, int[] parity, int[] dataBuffer) {
		encodeInternal(message, parity, dataBuffer);
	}

	/**
	 * Shared Reed-Solomon erasure decode. Zeroes the erased positions,
	 * evaluates syndromes and solves the Vandermonde system, writing the
	 * recovered symbols into erasedValues. {@code sig} is the signature
	 * scratch array (at least erasedLocations.length long).
	 */
	private void decodeReedSolomonInternal(int[] data, int[] erasedLocations,
			int[] erasedValues, int[] sig) {
		if (erasedLocations.length == 0)
			return;
		assert (erasedLocations.length == erasedValues.length);
		assert (erasedLocations.length <= paritySizeRS);
		for (int i = 0; i < erasedLocations.length; i++) {
			data[erasedLocations[i]] = 0;
		}
		for (int i = 0; i < erasedLocations.length; i++) {
			sig[i] = primitivePower[erasedLocations[i]];
			erasedValues[i] = GF.substitute(data, primitivePower[i]);
		}
		GF.solveVandermondeSystem(sig, erasedValues, erasedLocations.length);
	}

	/** Serial RS decode; uses the shared errSignature scratch (not thread-safe). */
	private void decodeReedSolomon(int[] data, int[] erasedLocations,
			int[] erasedValues) {
		decodeReedSolomonInternal(data, erasedLocations, erasedValues, errSignature);
	}

	/** RS decode with a local signature array, safe for concurrent callers. */
	private void decodeReedSolomonParallel(int[] data, int[] erasedLocations,
			int[] erasedValues) {
		decodeReedSolomonInternal(data, erasedLocations, erasedValues,
				new int[paritySize]);
	}

	/**
	 * Performs Reed Solomon decoding, assuming that all positions not
	 * included in erasedLocations are available.
	 */
	@Override
	public void decode(int[] data, int[] erasedLocations, int[] erasedValues) {
		decodeReedSolomon(data, erasedLocations, erasedValues);
	}

	/**
	 * Full SRC decode (serial scratch buffers). See decodeImpl for the
	 * three repair cases.
	 */
	@Override
	public void decode(int[] data, int[] erasedLocations, int[] erasedValues,
			int[] locationsToRead, int[] locationsNotToRead) {
		decodeImpl(data, erasedLocations, erasedValues, locationsToRead,
				locationsNotToRead, false);
	}

	/**
	 * Full SRC decode using only thread-local RS scratch state.
	 * Previously a verbatim copy of decode(); now both delegate to decodeImpl.
	 */
	@Override
	public void decodeParallel(int[] data, int[] erasedLocations,
			int[] erasedValues, int[] locationsToRead, int[] locationsNotToRead) {
		decodeImpl(data, erasedLocations, erasedValues, locationsToRead,
				locationsNotToRead, true);
	}

	/**
	 * Common decode logic for decode()/decodeParallel(); {@code parallel}
	 * selects which RS signature scratch is used.
	 *
	 * CASE 1: single erasure — repaired as the GF-sum of its SRC group
	 *         (locationsToRead is expected to be that group).
	 * CASE 2: multiple erasures in distinct groups — each repaired locally
	 *         as in CASE 1.
	 * CASE 3: at least two erasures share a group — full RS decode of the
	 *         RS part of the stripe, then the erased SRC parities are
	 *         recomputed from their (now repaired) groups.
	 */
	private void decodeImpl(int[] data, int[] erasedLocations,
			int[] erasedValues, int[] locationsToRead,
			int[] locationsNotToRead, boolean parallel) {

		assert (erasedLocations.length == erasedValues.length);

		// CASE 1 : SINGLE ERASURE
		if (erasedLocations.length == 1) {
			erasedValues[0] = 0;
			for (int i = 0; i < locationsToRead.length; i++) {
				erasedValues[0] = GF.add(data[locationsToRead[i]],
						erasedValues[0]);
			}
			return;
		}

		// CASE 2 : MULTIPLE ERASURES - NO CONFLICT
		if (!groupConflict(erasedLocations)) {
			for (int i = 0; i < erasedLocations.length; i++) {
				int[] singleErasedLocation = new int[] { erasedLocations[i] };
				int[] singleErasedValue = new int[] { 0 };
				decodeImpl(data, singleErasedLocation, singleErasedValue,
						groupsTable[erasedLocations[i]], null, parallel);
				erasedValues[i] = singleErasedValue[0];
			}
			return;
		}

		// CASE 3 : MULTIPLE ERASURES - CONFLICT
		// According to locationsToReadForDecode(), locationsToRead should
		// be of length equal to stripeSize for RS decoding.
		assert (locationsToRead.length == stripeSize);
		assert (locationsNotToRead.length == stripeSize + paritySize
				- locationsToRead.length);

		// Copy the RS part (data + RS parities) of the stripe.
		int[] dataRS = new int[paritySizeRS + stripeSize];
		for (int i = 0; i < paritySizeRS + stripeSize; i++) {
			dataRS[i] = data[i + paritySizeSRC];
		}

		// erasedLocationsRS contains actual erased locations of the RS
		// stripe plus some locations that are not supposed to be read.
		int[] erasedLocationsRS = new int[locationsNotToRead.length
				- this.paritySizeSRC];
		int k = 0;
		for (int i = 0; i < locationsNotToRead.length; i++) {
			if (locationsNotToRead[i] >= paritySizeSRC)
				erasedLocationsRS[k++] = locationsNotToRead[i] - paritySizeSRC;
		}
		int[] erasedValuesRS = new int[erasedLocationsRS.length];

		decodeReedSolomonInternal(dataRS, erasedLocationsRS, erasedValuesRS,
				parallel ? new int[paritySize] : errSignature);

		// Write repaired RS-part symbols back into the full stripe.
		for (int i = 0; i < erasedLocationsRS.length; i++) {
			data[paritySizeSRC + erasedLocationsRS[i]] = erasedValuesRS[i];
		}

		// now that the RS part is all fixed, fix the simple parities
		for (int i = 0; i < erasedLocations.length; i++) {
			if (erasedLocations[i] < paritySizeSRC) {
				int par = erasedLocations[i];
				data[par] = 0;
				for (int j = 0; j < groupsTable[erasedLocations[i]].length; j++) {
					data[par] = GF.add(data[groupsTable[erasedLocations[i]][j]],
							data[par]);
				}
			}
		}
		for (int i = 0; i < erasedLocations.length; i++) {
			erasedValues[i] = data[erasedLocations[i]];
		}
	}

	@Override
	public int stripeSize() {
		return this.stripeSize;
	}

	@Override
	public int paritySize() {
		return this.paritySize;
	}

	@Override
	public int symbolSize() {
		return (int) Math.round(Math.log(GF.getFieldSize()) / Math.log(2));
	}

	/**
	 * Figure out which locations need to be read to decode erased locations.
	 * The locations are specified as integers in the range [ 0, stripeSize()
	 * + paritySize() ). Values in the range [ 0, paritySize() ) represent
	 * parity data. Values in the range [ paritySize(), paritySize() +
	 * stripeSize() ) represent message data.
	 *
	 * @param erasedLocations The erased locations.
	 * @return The locations to read.
	 * @throws TooManyErasedLocations if fewer than stripeSize() good
	 *         RS-part locations remain.
	 */
	@Override
	public List<Integer> locationsToReadForDecode(List<Integer> erasedLocations)
			throws TooManyErasedLocations {
		List<Integer> locationsToRead;

		// Single erasure: read its local (SRC) group only.
		if (erasedLocations.size() == 1) {
			int loc = erasedLocations.get(0);
			locationsToRead = new ArrayList<Integer>(groupsTable[loc].length);
			for (int i = 0; i < groupsTable[loc].length; i++) {
				locationsToRead.add(groupsTable[loc][i]);
			}
			return locationsToRead;
		}

		// Multiple erasures: if they fall in distinct groups (no conflict),
		// the union of their groups suffices.
		int[] erasedLocationsArray = new int[erasedLocations.size()];
		for (int i = 0; i < erasedLocations.size(); i++) {
			erasedLocationsArray[i] = erasedLocations.get(i);
		}
		if (!groupConflict(erasedLocationsArray)) {
			// we expect approximately simpleParityDegree locations to be
			// read for each erased location
			locationsToRead = new ArrayList<Integer>(erasedLocations.size()
					* simpleParityDegree);
			// add unique locations
			for (int loc : erasedLocations) {
				for (int i = 0; i < groupsTable[loc].length; i++) {
					if (!locationsToRead.contains(groupsTable[loc][i]))
						locationsToRead.add(groupsTable[loc][i]);
				}
			}
			return locationsToRead;
		}

		// Conflict: RS decoding is required, so collect stripeSize() good
		// locations from the RS part (SRC parities omitted).
		locationsToRead = new ArrayList<Integer>(stripeSize());
		int limit = stripeSize() + paritySize();
		for (int loc = paritySizeSRC; loc < limit; loc++) {
			if (erasedLocations.indexOf(loc) == -1) {
				locationsToRead.add(loc);
				if (stripeSize() == locationsToRead.size()) {
					break;
				}
			}
		}
		// Not enough good locations: the stripe is unrecoverable.
		if (locationsToRead.size() != stripeSize()) {
			StringBuilder locationsStr = new StringBuilder();
			for (Integer erasedLocation : erasedLocations) {
				locationsStr.append(' ').append(erasedLocation);
			}
			throw new TooManyErasedLocations("Locations " + locationsStr);
		}
		return locationsToRead;
	}

	/*
	 * Given a location loc, return a list with the other locations belonging
	 * to the same SRC group as loc.
	 */
	private List<Integer> getSRCGroupNeighbors(int loc) {
		int limit = stripeSize() + paritySize();
		/*
		 * A group is expected to have at most simpleParityDegree + 1
		 * locations. neighbors will contain the simpleParityDegree
		 * neighbors of loc (loc itself excluded).
		 */
		List<Integer> neighbors = new ArrayList<Integer>(simpleParityDegree);

		int group = getSRCGroup(loc);
		// CASE 1: Group with "stored" SRC parity.
		if (group < paritySizeSRC) {
			if (group != loc)
				// group equals the location of the SRC parity;
				// hence, add the location to the neighbors.
				neighbors.add(group);
			// add the rest of the neighbors (loc is excluded)
			for (int i = paritySizeSRC + group * simpleParityDegree;
					i < paritySizeSRC + (group + 1) * simpleParityDegree; i++) {
				if (i != loc)
					neighbors.add(i);
			}
		} // CASE 2: Group is the one with the "inferred" SRC parity.
		else {
			assert (loc >= paritySizeSRC);
			// All SRC parities are neighbors.
			for (int i = 0; i < paritySizeSRC; i++) {
				neighbors.add(i);
			}
			// Add the remaining (non SRC-parity) neighbors.
			for (int i = paritySizeSRC + group * simpleParityDegree; i < limit; i++) {
				if (i != loc)
					neighbors.add(i);
			}
		}
		return neighbors;
	}

	/*
	 * Return the id of the SRC group location loc belongs to. Return -1 for
	 * invalid loc.
	 */
	private int getSRCGroup(int loc) {
		int group = -1;
		if (0 <= loc && loc < paritySizeSRC)
			group = loc;
		else if (paritySizeSRC <= loc && loc < stripeSize + paritySize)
			group = (int) (loc - paritySizeSRC) / simpleParityDegree;
		else
			group = -1;
		return group;
	}

	/*
	 * Check for conflict — whether any two locations in locs belong to the
	 * same SRC group.
	 */
	private boolean groupConflict(int[] locs) {
		int[] groups = new int[paritySizeSRC + 1];
		for (int i = 0; i < groups.length; i++) {
			groups[i] = 0;
		}
		/*
		 * If at least one position in locs is an SRC parity, mark the last
		 * (implied-parity) group as occupied.
		 */
		for (int i = 0; i < locs.length; i++) {
			if (locs[i] < paritySizeSRC) {
				groups[paritySizeSRC] = 1;
				break;
			}
		}
		for (int i = 0; i < locs.length; i++) {
			if (groups[getSRCGroup(locs[i])]++ > 0)
				return true;
		}
		return false;
	}

	@Override
	public CandidateLocations getCandidateLocations(Stripe stripe,
			int locationToReconstruct) {
		// Not implemented for SRC.
		return null;
	}

	@Override
	public int[] getLocationsToUse(Stripe stripe, RecoverTreeNode[] nodes,
			int[] choosed, int locationToReconstruct) {
		// Not implemented for SRC.
		return null;
	}

	@Override
	int[] getRecoverVector(int[] dataLocations, int locationToReconstruct) {
		// Not implemented for SRC.
		return null;
	}
}
package nok;

import nok.util.JsfUtil;
import nok.util.PaginationHelper;

import java.io.Serializable;
import java.util.ResourceBundle;
import javax.ejb.EJB;
import javax.inject.Named;
import javax.enterprise.context.SessionScoped;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import javax.faces.model.DataModel;
import javax.faces.model.ListDataModel;
import javax.faces.model.SelectItem;

/**
 * Session-scoped JSF CRUD controller for ReceivePascel entities
 * (standard generated controller pattern: paginated list, view/create/edit
 * navigation outcomes, and a nested Faces converter).
 */
@Named("receivePascelController")
@SessionScoped
public class ReceivePascelController implements Serializable {

    // Currently selected/edited entity; lazily created by getSelected().
    private ReceivePascel current;
    // Cached page data model; null forces a rebuild on next getItems().
    private DataModel items = null;
    @EJB
    private nok.ReceivePascelFacade ejbFacade;
    private PaginationHelper pagination;
    // Absolute index of the selected row across pages; -1 means "new item".
    private int selectedItemIndex;

    public ReceivePascelController() {
    }

    /**
     * Returns the current entity, creating an empty one on first access.
     */
    public ReceivePascel getSelected() {
        if (current == null) {
            current = new ReceivePascel();
            selectedItemIndex = -1;
        }
        return current;
    }

    private ReceivePascelFacade getFacade() {
        return ejbFacade;
    }

    /**
     * Lazily builds a 10-items-per-page pagination helper backed by the facade.
     */
    public PaginationHelper getPagination() {
        if (pagination == null) {
            pagination = new PaginationHelper(10) {

                @Override
                public int getItemsCount() {
                    return getFacade().count();
                }

                @Override
                public DataModel createPageDataModel() {
                    return new ListDataModel(getFacade().findRange(new int[]{getPageFirstItem(), getPageFirstItem() + getPageSize()}));
                }
            };
        }
        return pagination;
    }

    /** Navigation: refresh and go to the list page. */
    public String prepareList() {
        recreateModel();
        return "List";
    }

    /** Navigation: select the clicked row and go to the view page. */
    public String prepareView() {
        current = (ReceivePascel) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return "View";
    }

    /** Navigation: start a new entity and go to the create page. */
    public String prepareCreate() {
        current = new ReceivePascel();
        selectedItemIndex = -1;
        return "Create";
    }

    /**
     * Persists the current entity; stays on the create page with a fresh
     * entity on success, stays put with an error message on failure.
     */
    public String create() {
        try {
            getFacade().create(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceivePascelCreated"));
            return prepareCreate();
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Navigation: select the clicked row and go to the edit page. */
    public String prepareEdit() {
        current = (ReceivePascel) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        return "Edit";
    }

    /** Saves edits to the current entity; returns to the view page on success. */
    public String update() {
        try {
            getFacade().edit(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceivePascelUpdated"));
            return "View";
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
            return null;
        }
    }

    /** Deletes the clicked row, then rebuilds pagination/model and lists. */
    public String destroy() {
        current = (ReceivePascel) getItems().getRowData();
        selectedItemIndex = pagination.getPageFirstItem() + getItems().getRowIndex();
        performDestroy();
        recreatePagination();
        recreateModel();
        return "List";
    }

    /**
     * Deletes the current entity and shows the adjacent one, falling back to
     * the list page when nothing remains.
     */
    public String destroyAndView() {
        performDestroy();
        recreateModel();
        updateCurrentItem();
        if (selectedItemIndex >= 0) {
            return "View";
        } else {
            // all items were removed - go back to list
            recreateModel();
            return "List";
        }
    }

    /** Removes {@code current} via the facade, reporting success or failure. */
    private void performDestroy() {
        try {
            getFacade().remove(current);
            JsfUtil.addSuccessMessage(ResourceBundle.getBundle("/Bundle").getString("ReceivePascelDeleted"));
        } catch (Exception e) {
            JsfUtil.addErrorMessage(e, ResourceBundle.getBundle("/Bundle").getString("PersistenceErrorOccured"));
        }
    }

    /**
     * Re-resolves {@code current} after a deletion, clamping the selected
     * index and stepping the pagination back if the last page vanished.
     */
    private void updateCurrentItem() {
        int count = getFacade().count();
        if (selectedItemIndex >= count) {
            // selected index cannot be bigger than number of items:
            selectedItemIndex = count - 1;
            // go to previous page if last page disappeared:
            if (pagination.getPageFirstItem() >= count) {
                pagination.previousPage();
            }
        }
        if (selectedItemIndex >= 0) {
            current = getFacade().findRange(new int[]{selectedItemIndex, selectedItemIndex + 1}).get(0);
        }
    }

    /** Returns the (lazily rebuilt) data model for the current page. */
    public DataModel getItems() {
        if (items == null) {
            items = getPagination().createPageDataModel();
        }
        return items;
    }

    private void recreateModel() {
        items = null;
    }

    private void recreatePagination() {
        pagination = null;
    }

    /** Navigation: advance one page and refresh the list. */
    public String next() {
        getPagination().nextPage();
        recreateModel();
        return "List";
    }

    /** Navigation: go back one page and refresh the list. */
    public String previous() {
        getPagination().previousPage();
        recreateModel();
        return "List";
    }

    public SelectItem[] getItemsAvailableSelectMany() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), false);
    }

    public SelectItem[] getItemsAvailableSelectOne() {
        return JsfUtil.getSelectItems(ejbFacade.findAll(), true);
    }

    /** Looks up an entity by its String primary key. */
    public ReceivePascel getReceivePascel(java.lang.String id) {
        return ejbFacade.find(id);
    }

    /**
     * Faces converter mapping between ReceivePascel instances and their
     * String key (the receive ID), delegating lookups to the controller
     * resolved from the EL context.
     */
    @FacesConverter(forClass = ReceivePascel.class)
    public static class ReceivePascelControllerConverter implements Converter {

        @Override
        public Object getAsObject(FacesContext facesContext, UIComponent component, String value) {
            if (value == null || value.length() == 0) {
                return null;
            }
            ReceivePascelController controller = (ReceivePascelController) facesContext.getApplication().getELResolver().
                    getValue(facesContext.getELContext(), null, "receivePascelController");
            return controller.getReceivePascel(getKey(value));
        }

        // Key is the raw String value (String-typed primary key).
        java.lang.String getKey(String value) {
            java.lang.String key;
            key = value;
            return key;
        }

        String getStringKey(java.lang.String value) {
            StringBuilder sb = new StringBuilder();
            sb.append(value);
            return sb.toString();
        }

        @Override
        public String getAsString(FacesContext facesContext, UIComponent component, Object object) {
            if (object == null) {
                return null;
            }
            if (object instanceof ReceivePascel) {
                ReceivePascel o = (ReceivePascel) object;
                return getStringKey(o.getReceiveID());
            } else {
                throw new IllegalArgumentException("object " + object + " is of type " + object.getClass().getName() + "; expected type: " + ReceivePascel.class.getName());
            }
        }
    }

}
package com.cloud.host;

import com.cloud.agent.api.VgpuTypesInfo;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.resource.ResourceState;
import com.cloud.storage.Storage.StoragePoolType;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.db.GenericDao;

import javax.persistence.Column;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.DiscriminatorType;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;

/**
 * JPA value object mapped to the {@code host} table.
 *
 * <p>Several fields are deliberately managed by the DAO layer rather than through
 * setters on this class (see {@link #updated} and {@link #status}). The
 * {@code @Transient} collections are lazily loaded by the host DAO; {@code null}
 * means "not loaded yet".
 */
@Entity
@Table(name = "host")
@Inheritance(strategy = InheritanceType.TABLE_PER_CLASS)
@DiscriminatorColumn(name = "type", discriminatorType = DiscriminatorType.STRING, length = 32)
public class HostVO implements Host {

    // This field should be updated every time the state is updated. There is no
    // set method in the VO object because it is done within the DAO code.
    @Column(name = "update_count", updatable = true, nullable = false)
    protected long updated;

    // Delayed-load value: if null, this field has not been loaded yet.
    // Call the host DAO to load it.
    @Transient
    Map<String, String> details;

    // Delayed-load value: if null, this field has not been loaded yet.
    // Call the host DAO to load it.
    @Transient
    List<String> hostTags;

    // This value is only for saving and currently cannot be loaded.
    @Transient
    HashMap<String, HashMap<String, VgpuTypesInfo>> groupDetails = new HashMap<>();

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id")
    private long id;

    @Column(name = "disconnected")
    @Temporal(value = TemporalType.TIMESTAMP)
    private Date disconnectedOn;

    @Column(name = "name", nullable = false)
    private String name = null;

    /**
     * Note: There is no setter for status because it has to be set in the dao code.
     */
    @Column(name = "status", nullable = false)
    private Status status = null;

    @Column(name = "type", updatable = true, nullable = false)
    @Enumerated(value = EnumType.STRING)
    private Type type;

    @Column(name = "private_ip_address", nullable = false)
    private String privateIpAddress;

    @Column(name = "private_mac_address", nullable = false)
    private String privateMacAddress;

    @Column(name = "private_netmask", nullable = false)
    private String privateNetmask;

    @Column(name = "public_netmask")
    private String publicNetmask;

    @Column(name = "public_ip_address")
    private String publicIpAddress;

    @Column(name = "public_mac_address")
    private String publicMacAddress;

    @Column(name = "storage_ip_address")
    private String storageIpAddress;

    @Column(name = "cluster_id")
    private Long clusterId;

    @Column(name = "storage_netmask")
    private String storageNetmask;

    @Column(name = "storage_mac_address")
    private String storageMacAddress;

    // "Deux" fields: secondary storage-network interface (columns *_2).
    @Column(name = "storage_ip_address_2")
    private String storageIpAddressDeux;

    @Column(name = "storage_netmask_2")
    private String storageNetmaskDeux;

    @Column(name = "storage_mac_address_2")
    private String storageMacAddressDeux;

    @Column(name = "hypervisor_type", updatable = true, nullable = false)
    @Enumerated(value = EnumType.STRING)
    private HypervisorType hypervisorType;

    @Column(name = "proxy_port")
    private Integer proxyPort;

    @Column(name = "resource")
    private String resource;

    @Column(name = "fs_type")
    private StoragePoolType fsType;

    @Column(name = "available")
    private boolean available = true;

    @Column(name = "setup")
    private boolean setup = false;

    @Column(name = "resource_state", nullable = false)
    @Enumerated(value = EnumType.STRING)
    private ResourceState resourceState;

    @Column(name = "hypervisor_version")
    private String hypervisorVersion;

    @Column(name = "uuid")
    private String uuid;

    @Column(name = "data_center_id", nullable = false)
    private long dataCenterId;

    @Column(name = "pod_id")
    private Long podId;

    @Column(name = "cpu_sockets")
    private Integer cpuSockets;

    @Column(name = "cpus")
    private Integer cpus;

    @Column(name = "url")
    private String storageUrl;

    @Column(name = "speed")
    private Long speed;

    @Column(name = "ram")
    private long totalMemory;

    @Column(name = "parent", nullable = false)
    private String parent;

    @Column(name = "guid", updatable = true, nullable = false)
    private String guid;

    @Column(name = "capabilities")
    private String caps;

    // Nullable column; see getTotalSize() for the null-safe unboxing guard.
    @Column(name = "total_size")
    private Long totalSize;

    @Column(name = "last_ping")
    private long lastPinged;

    @Column(name = "mgmt_server_id")
    private Long managementServerId;

    @Column(name = "dom0_memory")
    private long dom0MinMemory;

    @Column(name = "version")
    private String version;

    @Column(name = GenericDao.CREATED_COLUMN)
    private Date created;

    @Column(name = GenericDao.REMOVED_COLUMN)
    private Date removed;

    /**
     * Creates a host in the {@code Creating} state, identified only by its guid.
     *
     * @param guid globally unique identifier of the host resource
     */
    public HostVO(final String guid) {
        this.guid = guid;
        this.status = Status.Creating;
        this.totalMemory = 0;
        this.dom0MinMemory = 0;
        this.resourceState = ResourceState.Creating;
        this.uuid = UUID.randomUUID().toString();
    }

    /** No-arg constructor for JPA; still assigns a fresh uuid. */
    protected HostVO() {
        this.uuid = UUID.randomUUID().toString();
    }

    /**
     * Full constructor for storage-type hosts: delegates to the compute-host
     * constructor and then fills in the storage-specific columns.
     *
     * <p>NOTE(review): the {@code deuxStorage*} parameters are accepted but never
     * assigned to the corresponding {@code *Deux} fields — confirm whether callers
     * rely on this before changing it; behavior intentionally preserved here.
     */
    public HostVO(final long id, final String name, final Type type, final String privateIpAddress, final String privateNetmask,
            final String privateMacAddress, final String publicIpAddress, final String publicNetmask, final String publicMacAddress,
            final String storageIpAddress, final String storageNetmask, final String storageMacAddress, final String deuxStorageIpAddress,
            final String duxStorageNetmask, final String deuxStorageMacAddress, final String guid, final Status status, final String version,
            final String iqn, final Date disconnectedOn, final long dcId, final Long podId, final long serverId, final long ping,
            final String parent, final long totalSize, final StoragePoolType fsType) {
        this(id, name, type, privateIpAddress, privateNetmask, privateMacAddress, publicIpAddress, publicNetmask, publicMacAddress,
                storageIpAddress, storageNetmask, storageMacAddress, guid, status, version, iqn, disconnectedOn, dcId, podId, serverId,
                ping, null, null, null, 0, null);
        this.parent = parent;
        this.totalSize = totalSize;
        this.fsType = fsType;
        this.uuid = UUID.randomUUID().toString();
    }

    /**
     * Full constructor for compute hosts.
     *
     * @param totalMemory total RAM in bytes; a {@code null} value is stored as 0
     */
    public HostVO(final long id, final String name, final Type type, final String privateIpAddress, final String privateNetmask,
            final String privateMacAddress, final String publicIpAddress, final String publicNetmask, final String publicMacAddress,
            final String storageIpAddress, final String storageNetmask, final String storageMacAddress, final String guid,
            final Status status, final String version, final String url, final Date disconnectedOn, final long dcId, final Long podId,
            final long serverId, final long ping, final Integer cpus, final Long speed, final Long totalMemory, final long dom0MinMemory,
            final String caps) {
        this.id = id;
        this.name = name;
        this.status = status;
        this.type = type;
        this.privateIpAddress = privateIpAddress;
        this.privateNetmask = privateNetmask;
        this.privateMacAddress = privateMacAddress;
        this.publicIpAddress = publicIpAddress;
        this.publicNetmask = publicNetmask;
        this.publicMacAddress = publicMacAddress;
        this.storageIpAddress = storageIpAddress;
        this.storageNetmask = storageNetmask;
        this.storageMacAddress = storageMacAddress;
        this.dataCenterId = dcId;
        this.podId = podId;
        this.cpus = cpus;
        this.version = version;
        this.speed = speed;
        // Column "ram" is a primitive long; normalize a null argument to 0.
        this.totalMemory = totalMemory != null ? totalMemory : 0;
        this.guid = guid;
        this.parent = null;
        this.totalSize = null;
        this.fsType = null;
        this.managementServerId = serverId;
        this.lastPinged = ping;
        this.caps = caps;
        this.disconnectedOn = disconnectedOn;
        this.dom0MinMemory = dom0MinMemory;
        this.storageUrl = url;
        this.uuid = UUID.randomUUID().toString();
    }

    public boolean isAvailable() {
        return available;
    }

    public void setAvailable(final boolean available) {
        this.available = available;
    }

    public boolean isSetup() {
        return setup;
    }

    public void setSetup(final boolean setup) {
        this.setup = setup;
    }

    public String getResource() {
        return resource;
    }

    public void setResource(final String resource) {
        this.resource = resource;
    }

    public Map<String, String> getDetails() {
        return details;
    }

    public void setDetails(final Map<String, String> details) {
        this.details = details;
    }

    /** Returns a single detail value, or null if details are not loaded or the key is absent. */
    public String getDetail(final String name) {
        return details != null ? details.get(name) : null;
    }

    /** Sets a single detail value; the details map must have been loaded first. */
    public void setDetail(final String name, final String value) {
        assert (details != null) : "Did you forget to load the details?";
        details.put(name, value);
    }

    public List<String> getHostTags() {
        return hostTags;
    }

    public void setHostTags(final List<String> hostTags) {
        this.hostTags = hostTags;
    }

    public HashMap<String, HashMap<String, VgpuTypesInfo>> getGpuGroupDetails() {
        return groupDetails;
    }

    public void setGpuGroups(final HashMap<String, HashMap<String, VgpuTypesInfo>> groupDetails) {
        this.groupDetails = groupDetails;
    }

    public void setCaps(final String caps) {
        this.caps = caps;
    }

    @Override
    public String getName() {
        return name;
    }

    public void setName(final String name) {
        this.name = name;
    }

    @Override
    public Type getType() {
        return type;
    }

    @Override
    public Date getCreated() {
        return created;
    }

    @Override
    public Status getStatus() {
        return status;
    }

    @Override
    public String getPrivateIpAddress() {
        return privateIpAddress;
    }

    public String getStorageUrl() {
        return storageUrl;
    }

    @Override
    public String getStorageIpAddress() {
        return storageIpAddress;
    }

    public void setStorageIpAddress(final String storageIpAddress) {
        this.storageIpAddress = storageIpAddress;
    }

    @Override
    public String getGuid() {
        return guid;
    }

    public void setGuid(final String guid) {
        this.guid = guid;
    }

    @Override
    public Long getTotalMemory() {
        return totalMemory;
    }

    public void setTotalMemory(final long totalMemory) {
        this.totalMemory = totalMemory;
    }

    @Override
    public Integer getCpuSockets() {
        return cpuSockets;
    }

    public void setCpuSockets(final Integer cpuSockets) {
        this.cpuSockets = cpuSockets;
    }

    @Override
    public Integer getCpus() {
        return cpus;
    }

    public void setCpus(final Integer cpus) {
        this.cpus = cpus;
    }

    @Override
    public Long getSpeed() {
        return speed;
    }

    public void setSpeed(final Long speed) {
        this.speed = speed;
    }

    @Override
    public Integer getProxyPort() {
        return proxyPort;
    }

    @Override
    public Long getPodId() {
        return podId;
    }

    public void setPodId(final Long podId) {
        this.podId = podId;
    }

    @Override
    public long getDataCenterId() {
        return dataCenterId;
    }

    public void setDataCenterId(final long dcId) {
        this.dataCenterId = dcId;
    }

    @Override
    public String getParent() {
        return parent;
    }

    @Override
    public String getStorageIpAddressDeux() {
        return storageIpAddressDeux;
    }

    public void setStorageIpAddressDeux(final String deuxStorageIpAddress) {
        this.storageIpAddressDeux = deuxStorageIpAddress;
    }

    @Override
    public HypervisorType getHypervisorType() {
        return hypervisorType;
    }

    @Override
    public Date getDisconnectedOn() {
        return disconnectedOn;
    }

    public void setDisconnectedOn(final Date disconnectedOn) {
        this.disconnectedOn = disconnectedOn;
    }

    @Override
    public String getVersion() {
        return version;
    }

    public void setVersion(final String version) {
        this.version = version;
    }

    /**
     * Returns the total storage size in bytes, or 0 when unknown.
     *
     * <p>Bug fix: {@code totalSize} is a nullable column and is explicitly set to
     * {@code null} by the compute-host constructor, so the previous direct return
     * auto-unboxed {@code null} and threw a NullPointerException.
     */
    @Override
    public long getTotalSize() {
        return totalSize != null ? totalSize : 0;
    }

    public void setTotalSize(final Long totalSize) {
        this.totalSize = totalSize;
    }

    @Override
    public String getCapabilities() {
        return caps;
    }

    @Override
    public long getLastPinged() {
        return lastPinged;
    }

    public void setLastPinged(final long lastPinged) {
        this.lastPinged = lastPinged;
    }

    @Override
    public Long getManagementServerId() {
        return managementServerId;
    }

    public void setManagementServerId(final Long managementServerId) {
        this.managementServerId = managementServerId;
    }

    @Override
    public Date getRemoved() {
        return removed;
    }

    @Override
    public Long getClusterId() {
        return clusterId;
    }

    public void setClusterId(final Long clusterId) {
        this.clusterId = clusterId;
    }

    @Override
    public String getPublicIpAddress() {
        return publicIpAddress;
    }

    @Override
    public String getPublicNetmask() {
        return publicNetmask;
    }

    @Override
    public String getPrivateNetmask() {
        return privateNetmask;
    }

    public void setPrivateNetmask(final String privateNetmask) {
        this.privateNetmask = privateNetmask;
    }

    @Override
    public String getStorageNetmask() {
        return storageNetmask;
    }

    public void setStorageNetmask(final String storageNetmask) {
        this.storageNetmask = storageNetmask;
    }

    @Override
    public String getStorageMacAddress() {
        return storageMacAddress;
    }

    @Override
    public String getPublicMacAddress() {
        return publicMacAddress;
    }

    @Override
    public String getPrivateMacAddress() {
        return privateMacAddress;
    }

    @Override
    public String getStorageNetmaskDeux() {
        return storageNetmaskDeux;
    }

    public void setStorageNetmaskDeux(final String deuxStorageNetmask) {
        this.storageNetmaskDeux = deuxStorageNetmask;
    }

    @Override
    public String getStorageMacAddressDeux() {
        return storageMacAddressDeux;
    }

    public void setStorageMacAddressDeux(final String duexStorageMacAddress) {
        this.storageMacAddressDeux = duexStorageMacAddress;
    }

    @Override
    public String getHypervisorVersion() {
        return hypervisorVersion;
    }

    public void setHypervisorVersion(final String hypervisorVersion) {
        this.hypervisorVersion = hypervisorVersion;
    }

    /** True while the host is in any of the maintenance-related resource states. */
    @Override
    public boolean isInMaintenanceStates() {
        return (getResourceState() == ResourceState.Maintenance || getResourceState() == ResourceState.ErrorInMaintenance
                || getResourceState() == ResourceState.PrepareForMaintenance);
    }

    @Override
    public ResourceState getResourceState() {
        return resourceState;
    }

    public void setResourceState(final ResourceState state) {
        resourceState = state;
    }

    public void setPrivateMacAddress(final String privateMacAddress) {
        this.privateMacAddress = privateMacAddress;
    }

    public void setPublicMacAddress(final String publicMacAddress) {
        this.publicMacAddress = publicMacAddress;
    }

    public void setStorageMacAddress(final String storageMacAddress) {
        this.storageMacAddress = storageMacAddress;
    }

    public void setPublicNetmask(final String publicNetmask) {
        this.publicNetmask = publicNetmask;
    }

    public void setPublicIpAddress(final String publicIpAddress) {
        this.publicIpAddress = publicIpAddress;
    }

    public void setHypervisorType(final HypervisorType hypervisorType) {
        this.hypervisorType = hypervisorType;
    }

    public void setParent(final String parent) {
        this.parent = parent;
    }

    public void setProxyPort(final Integer port) {
        proxyPort = port;
    }

    public void setStorageUrl(final String url) {
        this.storageUrl = url;
    }

    public void setPrivateIpAddress(final String ipAddress) {
        this.privateIpAddress = ipAddress;
    }

    public void setType(final Type type) {
        this.type = type;
    }

    public StoragePoolType getFsType() {
        return fsType;
    }

    // Identity is based solely on the database id, consistent with equals().
    @Override
    public int hashCode() {
        return NumbersUtil.hash(id);
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj instanceof HostVO) {
            return ((HostVO) obj).getId() == this.getId();
        } else {
            return false;
        }
    }

    @Override
    public long getId() {
        return id;
    }

    @Override
    public String toString() {
        return new StringBuilder("Host[").append("-").append(id).append("-").append(type).append("]").toString();
    }

    // The fsm "state" of a host is its persisted status column.
    @Override
    @Transient
    public Status getState() {
        return status;
    }

    public long getUpdated() {
        return updated;
    }

    /** Increments and returns the optimistic-update counter (DAO use only). */
    public long incrUpdated() {
        updated++;
        return updated;
    }

    @Override
    public String getUuid() {
        return this.uuid;
    }

    public void setUuid(final String uuid) {
        this.uuid = uuid;
    }
}
/*
 * Copyright 2004-2006 Stefan Reuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.asteriskjava.manager.internal;

import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import org.asteriskjava.manager.event.AgentCalledEvent;
import org.asteriskjava.manager.event.DisconnectEvent;
import org.asteriskjava.manager.event.ManagerEvent;
import org.asteriskjava.manager.event.ProtocolIdentifierReceivedEvent;
import org.asteriskjava.manager.event.RtcpReceivedEvent;
import org.asteriskjava.manager.event.StatusCompleteEvent;
import org.asteriskjava.manager.response.CommandResponse;
import org.asteriskjava.manager.response.ManagerResponse;
import org.asteriskjava.util.DateUtil;
import org.asteriskjava.util.SocketConnectionFacade;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Unit tests for {@code ManagerReaderImpl}: the reader is driven by a mocked
 * {@link SocketConnectionFacade} whose {@code readLine()} expectations script
 * the protocol lines, and dispatched events/responses are captured by
 * {@link MockedDispatcher} for assertion. The current date is pinned via
 * {@link DateUtil#overrideCurrentDate(Date)} so dateReceived can be asserted.
 */
public class ManagerReaderImplTest {
    private Date now;
    private MockedDispatcher dispatcher;
    private SocketConnectionFacade socketConnectionFacade;
    private ManagerReader managerReader;

    @Before
    public void setUp() {
        now = new Date();
        DateUtil.overrideCurrentDate(now);
        dispatcher = new MockedDispatcher();
        managerReader = new ManagerReaderImpl(dispatcher, this);
        socketConnectionFacade = createMock(SocketConnectionFacade.class);
    }

    @After
    public void tearDown() {
        // Restore the real clock so other tests are not affected.
        DateUtil.overrideCurrentDate(null);
    }

    @SuppressWarnings("cast")
    @Test
    public void testRunWithoutSocket() {
        // run() must refuse to start before a socket has been set.
        try {
            managerReader.run();
            fail("Must throw IllegalStateException");
        } catch (IllegalStateException e) {
            assertTrue("Exception must be of type IllegalStateException", e instanceof IllegalStateException);
        }
    }

    @Test
    public void testRunReceivingProtocolIdentifier() throws Exception {
        expect(socketConnectionFacade.readLine()).andReturn("Asterisk Call Manager/1.0");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a ProtocolIdentifierReceivedEvent", ProtocolIdentifierReceivedEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());
        assertEquals("ProtocolIdentifierReceivedEvent contains incorrect protocol identifier", "Asterisk Call Manager/1.0",
                ((ProtocolIdentifierReceivedEvent) dispatcher.dispatchedEvents.get(0)).getProtocolIdentifier());
        assertEquals("ProtocolIdentifierReceivedEvent contains incorrect dateReceived", now,
                dispatcher.dispatchedEvents.get(0).getDateReceived());
        assertEquals("second event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
        assertEquals("DisconnectEvent contains incorrect dateReceived", now,
                dispatcher.dispatchedEvents.get(1).getDateReceived());
    }

    @Test
    public void testRunReceivingEvent() throws Exception {
        expect(socketConnectionFacade.readLine()).andReturn("Event: StatusComplete");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a StatusCompleteEvent", StatusCompleteEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());
        assertEquals("second event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
    }

    @Test
    public void testRunReceivingEventWithMapProperty() throws Exception {
        // "Variable:" headers may repeat; they must be collected into a map property.
        expect(socketConnectionFacade.readLine()).andReturn("Event: AgentCalled");
        expect(socketConnectionFacade.readLine()).andReturn("Variable: var1=val1");
        expect(socketConnectionFacade.readLine()).andReturn("Variable: var2=val2");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a AgentCalledEvent", AgentCalledEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());

        AgentCalledEvent event = (AgentCalledEvent) dispatcher.dispatchedEvents.get(0);
        assertEquals("Returned event is of wrong type", AgentCalledEvent.class, event.getClass());
        assertEquals("Property variables[var1] is not set correctly", "val1", event.getVariables().get("var1"));
        assertEquals("Property variables[var2] is not set correctly", "val2", event.getVariables().get("var2"));
        assertEquals("Invalid size of variables property", 2, event.getVariables().size());
        assertEquals("second event must be an DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
    }

    @Test
    public void testRunReceivingEventWithMapPropertyAndOnlyOneEntry() throws Exception {
        expect(socketConnectionFacade.readLine()).andReturn("Event: AgentCalled");
        expect(socketConnectionFacade.readLine()).andReturn("Variable: var1=val1");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a AgentCalledEvent", AgentCalledEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());

        AgentCalledEvent event = (AgentCalledEvent) dispatcher.dispatchedEvents.get(0);
        assertEquals("Returned event is of wrong type", AgentCalledEvent.class, event.getClass());
        assertEquals("Property variables[var1] is not set correctly", "val1", event.getVariables().get("var1"));
        assertEquals("Invalid size of variables property", 1, event.getVariables().size());
        assertEquals("second event must be an DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
    }

    @Test
    public void testWorkaroundForAsteriskBug13319() throws Exception {
        // Asterisk bug 13319 produces a "From ..." line without a colon separator;
        // the reader must still parse the from address and port.
        expect(socketConnectionFacade.readLine()).andReturn("Event: RTCPReceived");
        expect(socketConnectionFacade.readLine()).andReturn("From 192.168.0.1:1234");
        expect(socketConnectionFacade.readLine()).andReturn("HighestSequence: 999");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a RtcpReceivedEvent", RtcpReceivedEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());

        RtcpReceivedEvent rtcpReceivedEvent = (RtcpReceivedEvent) dispatcher.dispatchedEvents.get(0);
        assertEquals("Invalid from address on RtcpReceivedEvent", "192.168.0.1",
                rtcpReceivedEvent.getFromAddress().getHostAddress());
        // Use valueOf instead of the deprecated boxing constructors (new Integer/new Long).
        assertEquals("Invalid from port on RtcpReceivedEvent", Integer.valueOf(1234),
                rtcpReceivedEvent.getFromPort());
        assertEquals("Invalid highest sequence on RtcpReceivedEvent", Long.valueOf(999),
                rtcpReceivedEvent.getHighestSequence());
        assertEquals("second event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
    }

    // todo fix testRunReceivingUserEvent
    public void XtestRunReceivingUserEvent() throws Exception {
        managerReader.registerEventClass(MyUserEvent.class);

        expect(socketConnectionFacade.readLine()).andReturn("Event: MyUser");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly two events dispatched", 2, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a MyUserEvent", MyUserEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());
        assertEquals("second event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(1).getClass());
    }

    @Test
    public void testRunReceivingResponse() throws Exception {
        expect(socketConnectionFacade.readLine()).andReturn("Response: Success");
        expect(socketConnectionFacade.readLine()).andReturn("Message: Authentication accepted");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly one response dispatched", 1, dispatcher.dispatchedResponses.size());
        assertEquals("first response must be a ManagerResponse", ManagerResponse.class,
                dispatcher.dispatchedResponses.get(0).getClass());
        assertEquals("ManagerResponse contains incorrect response", "Success",
                dispatcher.dispatchedResponses.get(0).getResponse());
        assertEquals("ManagerResponse contains incorrect message", "Authentication accepted",
                dispatcher.dispatchedResponses.get(0).getMessage());
        assertEquals("ManagerResponse contains incorrect message (via getAttribute)", "Authentication accepted",
                dispatcher.dispatchedResponses.get(0).getAttribute("MESSAGE"));
        assertEquals("ManagerResponse contains incorrect dateReceived", now,
                dispatcher.dispatchedResponses.get(0).getDateReceived());
        assertEquals("not exactly one events dispatched", 1, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());
    }

    @Test
    public void testRunReceivingCommandResponse() throws Exception {
        List<String> result = new ArrayList<String>();

        expect(socketConnectionFacade.readLine()).andReturn("Response: Follows");
        expect(socketConnectionFacade.readLine()).andReturn("ActionID: 678#12345");
        expect(socketConnectionFacade.readLine()).andReturn("Line1\nLine2\n--END COMMAND--");
        expect(socketConnectionFacade.readLine()).andReturn("");
        expect(socketConnectionFacade.readLine()).andReturn(null);

        result.add("Line1");
        result.add("Line2");

        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        // Register the expected response class for the internal action id "678".
        managerReader.expectResponseClass("678", CommandResponse.class);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("not exactly one response dispatched", 1, dispatcher.dispatchedResponses.size());
        assertEquals("first response must be a CommandResponse", CommandResponse.class,
                dispatcher.dispatchedResponses.get(0).getClass());
        assertEquals("CommandResponse contains incorrect response", "Follows",
                dispatcher.dispatchedResponses.get(0).getResponse());
        assertEquals("CommandResponse contains incorrect actionId", "678#12345",
                dispatcher.dispatchedResponses.get(0).getActionId());
        assertEquals("CommandResponse contains incorrect actionId (via getAttribute)", "678#12345",
                dispatcher.dispatchedResponses.get(0).getAttribute("actionId"));
        assertEquals("CommandResponse contains incorrect result", result,
                ((CommandResponse) dispatcher.dispatchedResponses.get(0)).getResult());
        assertEquals("CommandResponse contains incorrect dateReceived", now,
                dispatcher.dispatchedResponses.get(0).getDateReceived());
    }

    @Test
    public void testRunCatchingIOException() throws Exception {
        // A network failure must be swallowed and reported as a DisconnectEvent.
        expect(socketConnectionFacade.readLine()).andThrow(new IOException("Something happened to the network..."));
        replay(socketConnectionFacade);

        managerReader.setSocket(socketConnectionFacade);
        managerReader.run();

        verify(socketConnectionFacade);

        assertEquals("must not dispatch a response", 0, dispatcher.dispatchedResponses.size());
        assertEquals("not exactly one events dispatched", 1, dispatcher.dispatchedEvents.size());
        assertEquals("first event must be a DisconnectEvent", DisconnectEvent.class,
                dispatcher.dispatchedEvents.get(0).getClass());
    }

    /**
     * Records everything the reader dispatches so the tests can assert on it.
     * Declared static: it uses no state of the enclosing test instance, so there
     * is no need for a hidden reference to the outer class.
     */
    private static class MockedDispatcher implements Dispatcher {
        List<ManagerEvent> dispatchedEvents;
        List<ManagerResponse> dispatchedResponses;

        public MockedDispatcher() {
            this.dispatchedEvents = new ArrayList<ManagerEvent>();
            this.dispatchedResponses = new ArrayList<ManagerResponse>();
        }

        public void dispatchResponse(ManagerResponse response) {
            dispatchedResponses.add(response);
        }

        public void dispatchEvent(ManagerEvent event) {
            dispatchedEvents.add(event);
        }
    }
}
package com.stickycoding.rokon;

/**
 * Sprite.java
 * A child of GameObject, Sprite has functions for time dependent movement and rotation.
 *
 * There are two types of translational movement, one based on X and Y, and one based on a
 * magnitude and angle. The two methods can be used together, though it is recommended to
 * avoid this - it may be easy to get confused.
 *
 * @author Richard
 */
public class Sprite extends GameObject implements Updateable {

	/**
	 * Constants used for rotateTo
	 */
	public static final int ROTATE_TO_AUTOMATIC = 0, ROTATE_TO_CLOCKWISE = 1, ROTATE_TO_ANTI_CLOCKWISE = 2;

	/**
	 * The maximum number of Modifiers which can be active on a Sprite
	 */
	public static final int MAX_MODIFIERS = 8;

	// Number of non-null entries in modifier[]; used to short-circuit the update loop.
	protected int modifierCount = 0;
	protected Modifier[] modifier = new Modifier[MAX_MODIFIERS];

	// rotateTo state (active while isRotateTo is true)
	protected boolean isRotateTo = false;
	protected float rotateToAngleStart, rotateToAngle;
	protected long rotateToStartTime;
	protected int rotateToTime, rotateToType, rotateToDirection;
	protected Callback rotateToCallback;

	// Axis-based motion (X/Y speeds and accelerations, with optional terminal speeds)
	protected float accelerationX, accelerationY, speedX, speedY, terminalSpeedX, terminalSpeedY;
	protected boolean useTerminalSpeedX, useTerminalSpeedY;

	// Vector-based motion (magnitude + angle, with optional terminal velocity)
	protected float acceleration, velocity, velocityAngle, velocityXFactor, velocityYFactor, terminalVelocity;
	protected boolean useTerminalVelocity;

	// Rotational motion
	protected float angularVelocity, angularAcceleration, terminalAngularVelocity;
	protected boolean useTerminalAngularVelocity;

	protected Polygon polygon = Rokon.rectangle;

	/**
	 * Sets a Polygon for this Sprite
	 *
	 * @param polygon valid Polygon
	 */
	public void setPolygon(Polygon polygon) {
		this.polygon = polygon;
	}

	/**
	 * Fetches the Polygon associated with this Sprite. If none is set, it defaults to Rokon.rectangle
	 *
	 * @return Polygon object
	 */
	public Polygon getPolygon() {
		return polygon;
	}

	/**
	 * Returns a specific vertex of this Sprite, as it is drawn. Taking into account scaling and rotations.
	 *
	 * @param index vertex position
	 *
	 * @return float array, contains two elements, 0=X 1=Y
	 */
	public float[] getVertex(int index) {
		if(rotation != 0) {
			float x = getX() + (getWidth() * polygon.vertex[index].getX());
			float y = getY() + (getHeight() * polygon.vertex[index].getY());
			// Rotate about the sprite's centre.
			float pivotX = getX() + (getWidth() * 0.5f);
			float pivotY = getY() + (getHeight() * 0.5f);
			float[] f = MathHelper.rotate(rotation, x, y, pivotX, pivotY);
			return f;
		} else {
			return new float[] { getX() + (getWidth() * polygon.vertex[index].getX()), getY() + (getHeight() * polygon.vertex[index].getY()) };
		}
	}

	/**
	 * Creates a Sprite with given dimensions
	 *
	 * @param x x coordinate
	 * @param y y coordinate
	 * @param width width
	 * @param height height
	 */
	public Sprite(float x, float y, float width, float height) {
		super(x, y, width, height);
	}

	/**
	 * Called when the Sprite is removed from a Layer.
	 * Drops callback references so they can be garbage collected.
	 */
	public void onRemove() {
		super.onRemove();
		rotateToCallback = null;
		moveToCallback = null;
	}

	/**
	 * Stops all the dynamics for this object.
	 * Cancels any active moveTo and rotateTo, and zeroes all speeds, velocities and accelerations.
	 */
	public void stop() {
		isMoveTo = false;
		// FIX: an active rotateTo previously survived stop(), contradicting this method's contract.
		isRotateTo = false;
		accelerationX = 0;
		accelerationY = 0;
		acceleration = 0;
		speedX = 0;
		speedY = 0;
		velocity = 0;
		velocityXFactor = 0;
		velocityYFactor = 0;
		velocityAngle = 0;
		terminalSpeedX = 0;
		terminalSpeedY = 0;
		terminalVelocity = 0;
		angularVelocity = 0;
		angularAcceleration = 0;
		terminalAngularVelocity = 0;
	}

	/**
	 * Advances all time-dependent motion by one frame (Time.ticksFraction).
	 * Applies moveTo/rotateTo interpolation, integrates accelerations into speeds/velocities
	 * (clamping at terminal values where enabled), applies the resulting displacement and
	 * rotation, and finally runs any attached Modifiers.
	 */
	public void onUpdate() {
		super.onUpdate();
		if(isMoveTo) {
			onUpdateMoveTo();
		}
		if(isRotateTo) {
			onUpdateRotateTo();
		}
		if(accelerationX != 0) {
			speedX += accelerationX * Time.ticksFraction;
			// FIX: the negative-acceleration clamp previously compared speedY (not speedX)
			// against terminalSpeedX, so decelerating sprites never clamped on the X axis.
			if(useTerminalSpeedX && ((accelerationX > 0 && speedX > terminalSpeedX) || (accelerationX < 0 && speedX < terminalSpeedX))) {
				accelerationX = 0;
				speedX = terminalSpeedX;
				if(parentScene.useInvoke) attemptInvoke("onReachTerminalSpeedX");
			}
		}
		if(accelerationY != 0) {
			speedY += accelerationY * Time.ticksFraction;
			if(useTerminalSpeedY && ((accelerationY > 0 && speedY > terminalSpeedY) || (accelerationY < 0 && speedY < terminalSpeedY))) {
				accelerationY = 0;
				speedY = terminalSpeedY;
				if(parentScene.useInvoke) attemptInvoke("onReachTerminalSpeedY");
			}
		}
		if(speedX != 0) {
			moveX(speedX * Time.ticksFraction);
		}
		if(speedY != 0) {
			moveY(speedY * Time.ticksFraction);
		}
		if(acceleration != 0) {
			velocity += acceleration * Time.ticksFraction;
			if(useTerminalVelocity && ((acceleration > 0 && velocity > terminalVelocity) || (acceleration < 0 && velocity < terminalVelocity))) {
				acceleration = 0;
				velocity = terminalVelocity;
				if(parentScene.useInvoke) attemptInvoke("onReachTerminalVelocity");
			}
		}
		if(velocity != 0) {
			moveX(velocityXFactor * (velocity * Time.ticksFraction));
			moveY(velocityYFactor * (velocity * Time.ticksFraction));
		}
		if(angularAcceleration != 0) {
			angularVelocity += angularAcceleration * Time.ticksFraction;
			if(useTerminalAngularVelocity && ((angularAcceleration > 0 && angularVelocity > terminalAngularVelocity) || (angularAcceleration < 0 && angularVelocity < terminalAngularVelocity))) {
				angularAcceleration = 0;
				angularVelocity = terminalAngularVelocity;
				// Guard added for consistency: every other terminal callback above honours
				// parentScene.useInvoke before attempting reflection.
				if(parentScene.useInvoke) attemptInvoke("onReachTerminalAngularVelocity");
			}
		}
		if(angularVelocity != 0) {
			rotation += angularVelocity * Time.ticksFraction;
		}
		if(modifierCount > 0) {
			for(int i = 0; i < MAX_MODIFIERS; i++) {
				if(modifier[i] != null) {
					modifier[i].onUpdate(this);
				}
			}
		}
	}

	/**
	 * Sets speed of the Sprite in the X direction
	 *
	 * @param x positive or negative floating point
	 */
	public void setSpeedX(float x) {
		speedX = x;
	}

	/**
	 * Sets speed of the Sprite in the Y direction
	 *
	 * @param y positive or negative floating point
	 */
	public void setSpeedY(float y) {
		speedY = y;
	}

	/**
	 * Sets the speed of the Sprite on both X and Y axis
	 *
	 * @param x positive or negative integer
	 * @param y positive or negative integer
	 */
	public void setSpeed(int x, int y) {
		setSpeed((float) x, (float) y);
	}

	/**
	 * Sets the speed of the Sprite on both X and Y axis.
	 * Float overload matching setSpeedX/setSpeedY; the int version delegates here.
	 *
	 * @param x positive or negative floating point
	 * @param y positive or negative floating point
	 */
	public void setSpeed(float x, float y) {
		speedX = x;
		speedY = y;
	}

	/**
	 * Sets the velocity of the Sprite
	 * This is along the velocityAngle, and will be north if previously unset
	 *
	 * NOTE(review): if no angle was ever set, velocityXFactor/velocityYFactor are both 0,
	 * so the velocity produces no movement rather than moving north - confirm intent.
	 *
	 * @param velocity positive or negative integer
	 */
	public void setVelocity(int velocity) {
		setVelocity((float) velocity);
	}

	/**
	 * Sets the velocity of the Sprite along the current velocityAngle.
	 * Float overload; the int version delegates here.
	 *
	 * @param velocity positive or negative floating point
	 */
	public void setVelocity(float velocity) {
		this.velocity = velocity;
	}

	/**
	 * Sets the velocity of the Sprite
	 *
	 * @param velocity positive or negative floating point
	 * @param angle relative to north, in radians
	 */
	public void setVelocity(float velocity, float angle) {
		this.velocity = velocity;
		this.velocityAngle = angle;
		this.velocityXFactor = (float)Math.sin(angle);
		this.velocityYFactor = (float)Math.cos(angle);
	}

	/**
	 * Accelerates along the X direction
	 *
	 * @param accelerationX positive or negative floating point
	 */
	public void accelerateX(float accelerationX) {
		this.accelerationX = accelerationX;
	}

	/**
	 * Accelerates along the X direction to a maximum speed
	 *
	 * @param accelerationX positive or negative floating point
	 * @param terminalSpeedX the maximum speed to achieve in X direction
	 */
	public void accelerateX(float accelerationX, float terminalSpeedX) {
		this.accelerationX = accelerationX;
		this.terminalSpeedX = terminalSpeedX;
		useTerminalSpeedX = true;
	}

	/**
	 * Accelerates along the Y direction
	 *
	 * @param accelerationY positive or negative floating point
	 */
	public void accelerateY(float accelerationY) {
		this.accelerationY = accelerationY;
	}

	/**
	 * Accelerates along the Y direction to a maximum speed
	 *
	 * @param accelerationY positive or negative floating point
	 * @param terminalSpeedY the maximum speed to achieve in Y direction
	 */
	public void accelerateY(float accelerationY, float terminalSpeedY) {
		this.accelerationY = accelerationY;
		this.terminalSpeedY = terminalSpeedY;
		useTerminalSpeedY = true;
	}

	/**
	 * Accelerates along a given angle
	 *
	 * @param acceleration magnitude of acceleration
	 * @param angle relative to north, in radians
	 */
	public void accelerate(float acceleration, float angle) {
		this.acceleration = acceleration;
		this.velocityAngle = angle;
		this.velocityXFactor = (float)Math.sin(angle);
		this.velocityYFactor = (float)Math.cos(angle);
	}

	/**
	 * Accelerates along a given angle to a terminal velocity
	 *
	 * @param acceleration magnitude of acceleration
	 * @param angle relative to north, in radians
	 * @param terminalVelocity maximum velocity to reach
	 */
	public void accelerate(float acceleration, float angle, float terminalVelocity) {
		accelerate(acceleration, angle);
		this.terminalVelocity = terminalVelocity;
		useTerminalVelocity = true;
	}

	/**
	 * Removes the limit on speed in the X direction
	 */
	public void stopUsingTerminalSpeedX() {
		useTerminalSpeedX = false;
	}

	/**
	 * Removes the limit on speed in the Y direction
	 */
	public void stopUsingTerminalSpeedY() {
		useTerminalSpeedY = false;
	}

	/**
	 * Removes the limit on speed in both X and Y directions
	 */
	public void stopUsingTerminalSpeed() {
		useTerminalSpeedX = false;
		useTerminalSpeedY = false;
	}

	/**
	 * @return TRUE if the Sprite is limited to a given speed in the X direction
	 */
	public boolean isUsingTerminalSpeedX() {
		return useTerminalSpeedX;
	}

	/**
	 * @return TRUE if the Sprite is limited to a given speed in the Y direction
	 */
	public boolean isUsingTerminalSpeedY() {
		return useTerminalSpeedY;
	}

	/**
	 * Removes the limit on velocity
	 */
	public void stopUsingTerminalVelocity() {
		useTerminalVelocity = false;
	}

	/**
	 * @return TRUE if the Sprite is limited to a given terminal velocity
	 */
	public boolean isUsingTerminalVelocity() {
		return useTerminalVelocity;
	}

	/**
	 * Removes the limit on angular velocity
	 */
	public void stopUsingTerminalAngularVelocity() {
		useTerminalAngularVelocity = false;
	}

	/**
	 * @return TRUE if the Sprite is limited to a given terminal angular velocity
	 */
	public boolean isUsingTerminalAngularVelocity() {
		return useTerminalAngularVelocity;
	}

	/**
	 * @return current acceleration to speed in X direction
	 */
	public float getAccelerationX() {
		return accelerationX;
	}

	/**
	 * @return current acceleration to speed in Y direction
	 */
	public float getAccelerationY() {
		return accelerationY;
	}

	/**
	 * @return current acceleration to velocity
	 */
	public float getAcceleration() {
		return acceleration;
	}

	/**
	 * @return angular acceleration
	 */
	public float getAngularAcceleration() {
		return angularAcceleration;
	}

	/**
	 * @return angular velocity
	 */
	public float getAngularVelocity() {
		return angularVelocity;
	}

	/**
	 * @return current angle at which the velocity is being applied
	 */
	public float getVelocityAngle() {
		return velocityAngle;
	}

	/**
	 * @return magnitude of the velocity
	 */
	public float getVelocity() {
		return velocity;
	}

	/**
	 * @return scalar speed in X direction
	 */
	public float getSpeedX() {
		return speedX;
	}

	/**
	 * @return scalar speed in Y direction
	 */
	public float getSpeedY() {
		return speedY;
	}

	/**
	 * @return terminal speed in X direction
	 */
	public float getTerminalSpeedX() {
		return terminalSpeedX;
	}

	/**
	 * @return terminal speed in Y direction
	 */
	public float getTerminalSpeedY() {
		return terminalSpeedY;
	}

	/**
	 * @return terminal velocity
	 */
	public float getTerminalVelocity() {
		return terminalVelocity;
	}

	/**
	 * @return terminal angular velocity
	 */
	public float getTerminalAngularVelocity() {
		return terminalAngularVelocity;
	}

	/**
	 * Sets the terminal speed in the X direction
	 *
	 * @param terminalSpeedX terminal speed in X
	 */
	public void setTerminalSpeedX(float terminalSpeedX) {
		this.terminalSpeedX = terminalSpeedX;
		useTerminalSpeedX = true;
	}

	/**
	 * Sets the terminal speed in the Y direction
	 *
	 * @param terminalSpeedY terminal speed in Y
	 */
	public void setTerminalSpeedY(float terminalSpeedY) {
		this.terminalSpeedY = terminalSpeedY;
		useTerminalSpeedY = true;
	}

	/**
	 * Sets the terminal speed in both basic directions
	 *
	 * @param terminalSpeedX terminal speed in X
	 * @param terminalSpeedY terminal speed in Y
	 */
	public void setTerminalSpeed(float terminalSpeedX, float terminalSpeedY) {
		this.terminalSpeedX = terminalSpeedX;
		this.terminalSpeedY = terminalSpeedY;
		useTerminalSpeedX = true;
		useTerminalSpeedY = true;
	}

	/**
	 * Sets the terminal velocity
	 *
	 * @param terminalVelocity terminal velocity
	 */
	public void setTerminalVelocity(float terminalVelocity) {
		this.terminalVelocity = terminalVelocity;
		useTerminalVelocity = true;
	}

	/**
	 * Sets the terminal angular velocity
	 *
	 * @param terminalAngularVelocity terminal angular velocity
	 */
	public void setTerminalAngularVelocity(float terminalAngularVelocity) {
		this.terminalAngularVelocity = terminalAngularVelocity;
		useTerminalAngularVelocity = true;
	}

	/**
	 * Sets the angular acceleration
	 *
	 * @param acceleration floating point integer, in radians
	 */
	public void setAngularAcceleration(float acceleration) {
		this.angularAcceleration = acceleration;
	}

	/**
	 * Rotates the Sprite to a specific angle, over a given time.
	 * Any in-progress rotateTo is cancelled (triggering onRotateToCancel) and all angular
	 * motion is zeroed before the new rotation starts.
	 *
	 * NOTE(review): the javadoc says radians, but the automatic-direction logic compares
	 * against 180/360 and rotateToAngleStart is captured before rotation is normalised by
	 * Movement.TWO_PI - confirm the intended units with Movement.
	 *
	 * @param angle angle to rotate to
	 * @param direction the direction (using ROTATE_TO_ constants)
	 * @param time time (in milliseconds)
	 * @param type movement type, as defined in Movement
	 */
	public void rotateTo(float angle, int direction, int time, int type) {
		if(isRotateTo) {
			if(parentScene.useInvoke) attemptInvoke("onRotateToCancel");
		}
		angularVelocity = 0;
		angularAcceleration = 0;
		terminalAngularVelocity = 0;
		rotateToAngleStart = this.rotation;
		rotateToAngle = angle;
		rotateToDirection = direction;
		isRotateTo = true;
		rotateToType = type;
		rotateToStartTime = Time.ticks;
		rotateToTime = time;
		rotateToCallback = null;
		rotation = rotation % Movement.TWO_PI;
		if(rotateToDirection == ROTATE_TO_AUTOMATIC) {
			// Choose the shorter rotation direction based on which half the start and
			// target angles fall into.
			if(rotation > 180f) {
				if(angle > 180f) {
					if(angle > rotation) {
						rotateToDirection = ROTATE_TO_ANTI_CLOCKWISE;
					} else {
						rotateToDirection = ROTATE_TO_CLOCKWISE;
					}
				} else {
					if(angle > rotation - 180) {
						rotateToDirection = ROTATE_TO_ANTI_CLOCKWISE;
					} else {
						rotateToDirection = ROTATE_TO_CLOCKWISE;
					}
				}
			} else {
				if(angle > 180f) {
					if(angle > rotation + 180) {
						rotateToDirection = ROTATE_TO_ANTI_CLOCKWISE;
						rotateToAngleStart += 360;
					} else {
						rotateToDirection = ROTATE_TO_CLOCKWISE;
					}
				} else {
					if(angle > rotation) {
						rotateToDirection = ROTATE_TO_CLOCKWISE;
					} else {
						rotateToDirection = ROTATE_TO_ANTI_CLOCKWISE;
					}
				}
			}
		}
		Debug.print("Rotating from " + rotation + " to " + angle + " by "+ rotateToDirection);
	}

	/**
	 * Rotates to a given angle over a period of time
	 *
	 * @param angle the final angle, in radians
	 * @param direction automatic, clockwise or anticlockwise - defined by ROTATE_TO_ constants
	 * @param time in milliseconds
	 * @param type movement type, through Movement constants
	 * @param callback Callback object for invoking
	 */
	public void rotateTo(float angle, int direction, int time, int type, Callback callback) {
		// The 4-arg overload resets rotateToCallback to null, so set it afterwards.
		rotateTo(angle, direction, time, type);
		rotateToCallback = callback;
	}

	/**
	 * Per-frame step for an active rotateTo: interpolates rotation between the start and
	 * target angles, and finishes (invoking onRotateToComplete and any Callback) once the
	 * elapsed fraction reaches 1.
	 */
	protected void onUpdateRotateTo() {
		float position = (float)(Time.ticks - rotateToStartTime) / (float)rotateToTime;
		float movementFactor = Movement.getPosition(position, rotateToType);
		if(position >= 1) {
			// Snap exactly to the target and clear all rotational motion.
			rotation = rotateToAngle;
			isRotateTo = false;
			if(parentScene.useInvoke) {
				attemptInvoke("onRotateToComplete");
			}
			if(rotateToCallback != null) {
				attemptInvoke(rotateToCallback);
			}
			angularVelocity = 0;
			angularAcceleration = 0;
			terminalAngularVelocity = 0;
			return;
		}
		// NOTE(review): both branches are algebraically identical
		// (a + (b-a)*f == a - (a-b)*f); direction is effectively encoded in
		// rotateToAngleStart (the +360 adjustment above) - confirm before simplifying.
		if(rotateToDirection == ROTATE_TO_CLOCKWISE) {
			rotation = rotateToAngleStart + (rotateToAngle - rotateToAngleStart) * movementFactor;
		} else {
			rotation = rotateToAngleStart - (rotateToAngleStart - rotateToAngle) * movementFactor;
		}
	}

	// moveTo state (active while isMoveTo is true)
	protected boolean isMoveTo = false;
	protected float moveToStartX, moveToStartY, moveToFinalX, moveToFinalY;
	protected int moveToType;
	protected long moveToStartTime, moveToTime;
	protected Callback moveToCallback;

	/**
	 * Moves the Sprite to a given spot, in a given time, invoking a Callback on completion.
	 *
	 * @param x final X coordinate
	 * @param y final Y coordinate
	 * @param time the time
	 * @param type the movement type, from Movement constants
	 * @param callback Callback object for invoking
	 */
	public void moveTo(float x, float y, long time, int type, Callback callback) {
		// The 4-arg overload resets moveToCallback to null, so set it afterwards.
		moveTo(x, y, time, type);
		moveToCallback = callback;
	}

	/**
	 * Moves the Sprite to a given spot, in a given time.
	 * All previous translational motion is cancelled. It may be possible to apply your own
	 * velocity and acceleration changes while moveTo is running, though it should be avoided.
	 * If the object is already moving, onMoveToCancel is triggered if attached to a handler.
	 *
	 * @param x final X coordinate
	 * @param y final Y coordinate
	 * @param time the time
	 * @param type the movement type, from Movement constants
	 */
	public void moveTo(float x, float y, long time, int type) {
		if(isMoveTo) {
			if(parentScene.useInvoke) {
				attemptInvoke("onMoveToCancel");
			}
		}
		accelerationX = 0;
		accelerationY = 0;
		acceleration = 0;
		speedX = 0;
		speedY = 0;
		velocity = 0;
		velocityXFactor = 0;
		velocityYFactor = 0;
		velocityAngle = 0;
		terminalSpeedX = 0;
		terminalSpeedY = 0;
		terminalVelocity = 0;
		moveToStartX = this.getX();
		moveToStartY = this.getY();
		moveToFinalX = x;
		moveToFinalY = y;
		isMoveTo = true;
		moveToType = type;
		moveToStartTime = Time.ticks;
		moveToTime = time;
		moveToCallback = null;
	}

	/**
	 * Linearly moves the Sprite to a given spot, in a given time.
	 * Equivalent to moveTo with Movement.LINEAR.
	 *
	 * @param x final X coordinate
	 * @param y final Y coordinate
	 * @param time the time
	 */
	public void moveTo(float x, float y, long time) {
		moveTo(x, y, time, Movement.LINEAR);
	}

	/**
	 * Per-frame step for an active moveTo: interpolates position between start and target,
	 * and finishes (invoking onMoveToComplete and any Callback) once the elapsed fraction
	 * reaches 1.
	 */
	protected void onUpdateMoveTo() {
		float position = (float)(Time.ticks - moveToStartTime) / (float)moveToTime;
		float movementFactor = Movement.getPosition(position, moveToType);
		if(position >= 1) {
			// Snap exactly to the destination and clear all translational motion.
			setX(moveToFinalX);
			setY(moveToFinalY);
			isMoveTo = false;
			if(moveToCallback != null) {
				attemptInvoke(moveToCallback);
			}
			if(parentScene.useInvoke) {
				attemptInvoke("onMoveToComplete");
			}
			accelerationX = 0;
			accelerationY = 0;
			acceleration = 0;
			speedX = 0;
			speedY = 0;
			velocity = 0;
			velocityXFactor = 0;
			velocityYFactor = 0;
			velocityAngle = 0;
			terminalSpeedX = 0;
			terminalSpeedY = 0;
			terminalVelocity = 0;
			return;
		}
		setX(moveToStartX + ((moveToFinalX - moveToStartX) * movementFactor));
		setY(moveToStartY + ((moveToFinalY - moveToStartY) * movementFactor));
	}

	/**
	 * Adds a Modifier to this Sprite
	 *
	 * @param modifier valid Modifier object
	 *
	 * @return TRUE if there was room to add the Modifier, FALSE if it failed
	 */
	public boolean addModifier(Modifier modifier) {
		for(int i = 0; i < MAX_MODIFIERS; i++) {
			if(this.modifier[i] == null) {
				this.modifier[i] = modifier;
				modifier.onCreate(this);
				modifierCount++;
				return true;
			}
		}
		Debug.warning("Tried addModifier, Sprite is full [" + MAX_MODIFIERS + "]");
		return false;
	}

	/**
	 * Removes a Modifier from a Sprite (if it exists)
	 *
	 * @param modifier valid Modifier object
	 */
	public void removeModifier(Modifier modifier) {
		for(int i = 0; i < MAX_MODIFIERS; i++) {
			if(this.modifier[i] == modifier) {
				this.modifier[i] = null;
				modifierCount--;
				return;
			}
		}
	}

	/**
	 * Clears all the Modifiers from the Sprite
	 */
	public void clearModifiers() {
		for(int i = 0; i < MAX_MODIFIERS; i++) {
			modifier[i] = null;
		}
		// FIX: modifierCount was previously left untouched, so onUpdate() kept scanning
		// the (now empty) modifier array on every frame.
		modifierCount = 0;
	}

	/**
	 * Determines whether a Sprite overlaps with this one
	 *
	 * @param sprite valid Sprite object
	 *
	 * @return TRUE if overlapping, FALSE otherwise
	 */
	public boolean intersects(Sprite sprite) {
		return MathHelper.intersects(this, sprite);
	}

}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: https://www.lwjgl.org/license
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.vulkan;

import javax.annotation.*;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * See {@link VkImageStencilUsageCreateInfo}.
 * 
 * <h3>Layout</h3>
 * 
 * <pre><code>
 * struct VkImageStencilUsageCreateInfoEXT {
 *     VkStructureType sType;
 *     void const * pNext;
 *     VkImageUsageFlags stencilUsage;
 * }</code></pre>
 */
public class VkImageStencilUsageCreateInfoEXT extends VkImageStencilUsageCreateInfo {

    /**
     * Creates a {@code VkImageStencilUsageCreateInfoEXT} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkImageStencilUsageCreateInfoEXT(ByteBuffer container) {
        super(container);
    }

    /** Sets the specified value to the {@code sType} field. */
    @Override
    public VkImageStencilUsageCreateInfoEXT sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link VK12#VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO} value to the {@code sType} field. */
    @Override
    public VkImageStencilUsageCreateInfoEXT sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO); }
    /** Sets the specified value to the {@code pNext} field. */
    @Override
    public VkImageStencilUsageCreateInfoEXT pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
    /** Sets the specified value to the {@code stencilUsage} field. */
    @Override
    public VkImageStencilUsageCreateInfoEXT stencilUsage(@NativeType("VkImageUsageFlags") int value) { nstencilUsage(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    @Override
    public VkImageStencilUsageCreateInfoEXT set(
        int sType,
        long pNext,
        int stencilUsage
    ) {
        sType(sType);
        pNext(pNext);
        stencilUsage(stencilUsage);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkImageStencilUsageCreateInfoEXT set(VkImageStencilUsageCreateInfoEXT src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkImageStencilUsageCreateInfoEXT malloc() {
        return wrap(VkImageStencilUsageCreateInfoEXT.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkImageStencilUsageCreateInfoEXT calloc() {
        return wrap(VkImageStencilUsageCreateInfoEXT.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance allocated with {@link BufferUtils}. */
    public static VkImageStencilUsageCreateInfoEXT create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkImageStencilUsageCreateInfoEXT.class, memAddress(container), container);
    }

    /** Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance for the specified memory address. */
    public static VkImageStencilUsageCreateInfoEXT create(long address) {
        return wrap(VkImageStencilUsageCreateInfoEXT.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkImageStencilUsageCreateInfoEXT createSafe(long address) {
        return address == NULL ? null : wrap(VkImageStencilUsageCreateInfoEXT.class, address);
    }

    /**
     * Returns a new {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkImageStencilUsageCreateInfoEXT.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkImageStencilUsageCreateInfoEXT.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkImageStencilUsageCreateInfoEXT malloc(MemoryStack stack) {
        return wrap(VkImageStencilUsageCreateInfoEXT.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkImageStencilUsageCreateInfoEXT} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkImageStencilUsageCreateInfoEXT calloc(MemoryStack stack) {
        return wrap(VkImageStencilUsageCreateInfoEXT.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkImageStencilUsageCreateInfoEXT.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkImageStencilUsageCreateInfoEXT.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** An array of {@link VkImageStencilUsageCreateInfoEXT} structs. */
    public static class Buffer extends VkImageStencilUsageCreateInfo.Buffer {

        /** Shared prototype element (at address -1) used only as a factory for view instances. */
        private static final VkImageStencilUsageCreateInfoEXT ELEMENT_FACTORY = VkImageStencilUsageCreateInfoEXT.create(-1L);

        /**
         * Creates a new {@code VkImageStencilUsageCreateInfoEXT.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkImageStencilUsageCreateInfoEXT#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container);
        }

        /** Creates a buffer over {@code cap} structs at {@code address}, with no backing container. */
        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkImageStencilUsageCreateInfoEXT getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** Sets the specified value to the {@code sType} field. */
        @Override
        public VkImageStencilUsageCreateInfoEXT.Buffer sType(@NativeType("VkStructureType") int value) { VkImageStencilUsageCreateInfoEXT.nsType(address(), value); return this; }
        /** Sets the {@link VK12#VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO} value to the {@code sType} field. */
        @Override
        public VkImageStencilUsageCreateInfoEXT.Buffer sType$Default() { return sType(VK12.VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO); }
        /** Sets the specified value to the {@code pNext} field. */
        @Override
        public VkImageStencilUsageCreateInfoEXT.Buffer pNext(@NativeType("void const *") long value) { VkImageStencilUsageCreateInfoEXT.npNext(address(), value); return this; }
        /** Sets the specified value to the {@code stencilUsage} field. */
        @Override
        public VkImageStencilUsageCreateInfoEXT.Buffer stencilUsage(@NativeType("VkImageUsageFlags") int value) { VkImageStencilUsageCreateInfoEXT.nstencilUsage(address(), value); return this; }

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /* * This package is based on the work done by Timothy Gerard Endres * (time@ice.com) to whom the Ant project is very grateful for his great code. */ package org.apache.commons.compress.archivers.tar; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveInputStream; import org.apache.commons.compress.archivers.zip.ZipEncoding; import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; import org.apache.commons.compress.utils.ArchiveUtils; import org.apache.commons.compress.utils.CharsetNames; import org.apache.commons.compress.utils.IOUtils; /** * The TarInputStream reads a UNIX tar archive as an InputStream. * methods are provided to position at each successive entry in * the archive, and the read each entry as a normal input stream * using read(). 
 * @NotThreadSafe
 */
public class TarArchiveInputStream extends ArchiveInputStream {

    private static final int SMALL_BUFFER_SIZE = 256;

    // Scratch buffer, presumably reused for short reads elsewhere in the class
    // (declared here; its uses are outside this section).
    private final byte[] SMALL_BUF = new byte[SMALL_BUFFER_SIZE];

    /** The size the TAR header */
    private final int recordSize;

    /** The size of a block */
    private final int blockSize;

    /** True if file has hit EOF */
    private boolean hasHitEOF;

    /** Size of the current entry */
    private long entrySize;

    /** How far into the entry the stream is at */
    private long entryOffset;

    /** An input stream to read from */
    private final InputStream is;

    /** The meta-data about the current entry */
    private TarArchiveEntry currEntry;

    /** The encoding of the file */
    private final ZipEncoding encoding;

    /**
     * Constructor for TarInputStream.
     * @param is the input stream to use
     */
    public TarArchiveInputStream(InputStream is) {
        this(is, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE);
    }

    /**
     * Constructor for TarInputStream.
     * @param is the input stream to use
     * @param encoding name of the encoding to use for file names
     * @since 1.4
     */
    public TarArchiveInputStream(InputStream is, String encoding) {
        this(is, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE,
             encoding);
    }

    /**
     * Constructor for TarInputStream.
     * @param is the input stream to use
     * @param blockSize the block size to use
     */
    public TarArchiveInputStream(InputStream is, int blockSize) {
        this(is, blockSize, TarConstants.DEFAULT_RCDSIZE);
    }

    /**
     * Constructor for TarInputStream.
     * @param is the input stream to use
     * @param blockSize the block size to use
     * @param encoding name of the encoding to use for file names
     * @since 1.4
     */
    public TarArchiveInputStream(InputStream is, int blockSize,
                                 String encoding) {
        this(is, blockSize, TarConstants.DEFAULT_RCDSIZE, encoding);
    }

    /**
     * Constructor for TarInputStream.
     * @param is the input stream to use
     * @param blockSize the block size to use
     * @param recordSize the record size to use
     */
    public TarArchiveInputStream(InputStream is, int blockSize, int recordSize) {
        this(is, blockSize, recordSize, null);
    }

    /**
     * Constructor for TarInputStream. All other constructors delegate here.
     * @param is the input stream to use
     * @param blockSize the block size to use
     * @param recordSize the record size to use
     * @param encoding name of the encoding to use for file names;
     *        null selects the helper's default encoding
     * @since 1.4
     */
    public TarArchiveInputStream(InputStream is, int blockSize, int recordSize,
                                 String encoding) {
        this.is = is;
        this.hasHitEOF = false;
        this.encoding = ZipEncodingHelper.getZipEncoding(encoding);
        this.recordSize = recordSize;
        this.blockSize = blockSize;
    }

    /**
     * Closes this stream by closing the underlying input stream.
     * @throws IOException on error
     */
    @Override
    public void close() throws IOException {
        is.close();
    }

    /**
     * Get the record size being used by this stream's buffer.
     *
     * @return The TarBuffer record size.
     */
    public int getRecordSize() {
        return recordSize;
    }

    /**
     * Get the available data that can be read from the current
     * entry in the archive. This does not indicate how much data
     * is left in the entire archive, only in the current entry.
     * This value is determined from the entry's size header field
     * and the amount of data already read from the current entry.
     * Integer.MAX_VALUE is returned in case more than Integer.MAX_VALUE
     * bytes are left in the current entry in the archive.
     *
     * @return The number of available bytes for the current entry.
     * @throws IOException for signature
     */
    @Override
    public int available() throws IOException {
        if (entrySize - entryOffset > Integer.MAX_VALUE) {
            return Integer.MAX_VALUE;
        }
        return (int) (entrySize - entryOffset);
    }

    /**
     * Skip bytes in the input buffer. This skips bytes in the
     * current entry's data, not the entire archive, and will
     * stop at the end of the current entry's data if the number
     * to skip extends beyond that point.
     *
     * @param numToSkip The number of bytes to skip.
     * @return the number actually skipped
     * @throws IOException on error
     */
    @Override
    public long skip(long numToSkip) throws IOException {
        // Never skip past the end of the current entry's data.
        long available = entrySize - entryOffset;
        numToSkip = Math.min(numToSkip, available);
        long skipped = IOUtils.skip(is, numToSkip);
        count(skipped);
        entryOffset += skipped;
        return skipped;
    }

    /**
     * Since we do not support marking just yet, we do nothing.
     */
    @Override
    public synchronized void reset() {
    }

    /**
     * Get the next entry in this tar archive. This will skip
     * over any remaining data in the current entry, if there
     * is one, and place the input stream at the header of the
     * next entry, and read the header and instantiate a new
     * TarEntry from the header bytes and return that entry.
     * If there are no more entries in the archive, null will
     * be returned to indicate that the end of the archive has
     * been reached.
     *
     * @return The next TarEntry in the archive, or null.
     * @throws IOException on error
     */
    public TarArchiveEntry getNextTarEntry() throws IOException {
        if (hasHitEOF) {
            return null;
        }

        if (currEntry != null) {
            /* Skip will only go to the end of the current entry */
            skip(Long.MAX_VALUE);

            /* skip to the end of the last record */
            skipRecordPadding();
        }

        byte[] headerBuf = getRecord();

        if (headerBuf == null) {
            /* hit EOF */
            currEntry = null;
            return null;
        }

        try {
            currEntry = new TarArchiveEntry(headerBuf, encoding);
        } catch (IllegalArgumentException e) {
            // Wrap so callers only have to deal with IOException; keep the cause.
            IOException ioe = new IOException("Error detected parsing the header");
            ioe.initCause(e);
            throw ioe;
        }

        entryOffset = 0;
        entrySize = currEntry.getSize();

        // GNU long link/name entries store the real link/file name as the
        // DATA of a synthetic entry that precedes the real one.
        if (currEntry.isGNULongLinkEntry()) {
            byte[] longLinkData = getLongNameData();
            if (longLinkData == null) {
                // Bugzilla: 40334
                // Malformed tar file - long link entry name not followed by
                // entry
                return null;
            }
            currEntry.setLinkName(encoding.decode(longLinkData));
        }

        if (currEntry.isGNULongNameEntry()) {
            byte[] longNameData = getLongNameData();
            if (longNameData == null) {
                // Bugzilla: 40334
                // Malformed tar file - long entry name not followed by
                // entry
                return null;
            }
            currEntry.setName(encoding.decode(longNameData));
        }

        if (currEntry.isPaxHeader()){ // Process Pax headers
            paxHeaders();
        }

        if (currEntry.isGNUSparse()){ // Process sparse files
            readGNUSparse();
        }

        // If the size of the next element in the archive has changed
        // due to a new size being reported in the posix header
        // information, we update entrySize here so that it contains
        // the correct value.
        entrySize = currEntry.getSize();

        return currEntry;
    }

    /**
     * The last record block should be written at the full size, so skip any
     * additional space used to fill a record after an entry
     */
    private void skipRecordPadding() throws IOException {
        // Only entries whose size is not a multiple of recordSize are padded.
        if (this.entrySize > 0 && this.entrySize % this.recordSize != 0) {
            long numRecords = (this.entrySize / this.recordSize) + 1;
            long padding = (numRecords * this.recordSize) - this.entrySize;
            long skipped = IOUtils.skip(is, padding);
            count(skipped);
        }
    }

    /**
     * Get the next entry in this tar archive as longname data.
     *
     * @return The next entry in the archive as longname data, or null.
     * @throws IOException on error
     */
    protected byte[] getLongNameData() throws IOException {
        // read in the name
        ByteArrayOutputStream longName = new ByteArrayOutputStream();
        int length = 0;
        while ((length = read(SMALL_BUF)) >= 0) {
            longName.write(SMALL_BUF, 0, length);
        }
        // Advance to the real entry that the long-name entry describes.
        getNextEntry();
        if (currEntry == null) {
            // Bugzilla: 40334
            // Malformed tar file - long entry name not followed by entry
            return null;
        }
        byte[] longNameData = longName.toByteArray();
        // remove trailing null terminator(s)
        length = longNameData.length;
        while (length > 0 && longNameData[length - 1] == 0) {
            --length;
        }
        if (length != longNameData.length) {
            byte[] l = new byte[length];
            System.arraycopy(longNameData, 0, l, 0, length);
            longNameData = l;
        }
        return longNameData;
    }

    /**
     * Get the next record in this tar archive.
This will skip * over any remaining data in the current entry, if there * is one, and place the input stream at the header of the * next entry. * * <p>If there are no more entries in the archive, null will be * returned to indicate that the end of the archive has been * reached. At the same time the {@code hasHitEOF} marker will be * set to true.</p> * * @return The next header in the archive, or null. * @throws IOException on error */ private byte[] getRecord() throws IOException { byte[] headerBuf = readRecord(); hasHitEOF = isEOFRecord(headerBuf); if (hasHitEOF && headerBuf != null) { tryToConsumeSecondEOFRecord(); consumeRemainderOfLastBlock(); headerBuf = null; } return headerBuf; } /** * Determine if an archive record indicate End of Archive. End of * archive is indicated by a record that consists entirely of null bytes. * * @param record The record data to check. * @return true if the record data is an End of Archive */ protected boolean isEOFRecord(byte[] record) { return record == null || ArchiveUtils.isArrayZero(record, recordSize); } /** * Read a record from the input stream and return the data. * * @return The record data or null if EOF has been hit. 
* @throws IOException on error */ protected byte[] readRecord() throws IOException { byte[] record = new byte[recordSize]; int readNow = IOUtils.readFully(is, record); count(readNow); if (readNow != recordSize) { return null; } return record; } private void paxHeaders() throws IOException{ Map<String, String> headers = parsePaxHeaders(this); getNextEntry(); // Get the actual file entry applyPaxHeadersToCurrentEntry(headers); } Map<String, String> parsePaxHeaders(InputStream i) throws IOException { Map<String, String> headers = new HashMap<String, String>(); // Format is "length keyword=value\n"; while(true){ // get length int ch; int len = 0; int read = 0; while((ch = i.read()) != -1) { read++; if (ch == ' '){ // End of length string // Get keyword ByteArrayOutputStream coll = new ByteArrayOutputStream(); while((ch = i.read()) != -1) { read++; if (ch == '='){ // end of keyword String keyword = coll.toString(CharsetNames.UTF_8); // Get rest of entry byte[] rest = new byte[len - read]; int got = i.read(rest); if (got != len - read){ throw new IOException("Failed to read " + "Paxheader. Expected " + (len - read) + " bytes, read " + got); } // Drop trailing NL String value = new String(rest, 0, len - read - 1, CharsetNames.UTF_8); headers.put(keyword, value); break; } coll.write((byte) ch); } break; // Processed single header } len *= 10; len += ch - '0'; } if (ch == -1){ // EOF break; } } return headers; } private void applyPaxHeadersToCurrentEntry(Map<String, String> headers) { /* * The following headers are defined for Pax. 
         * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields
         * mtime
         * comment
         * gid, gname
         * linkpath
         * size
         * uid,uname
         * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those
         */
        for (Entry<String, String> ent : headers.entrySet()){
            String key = ent.getKey();
            String val = ent.getValue();
            if ("path".equals(key)){
                currEntry.setName(val);
            } else if ("linkpath".equals(key)){
                currEntry.setLinkName(val);
            } else if ("gid".equals(key)){
                currEntry.setGroupId(Integer.parseInt(val));
            } else if ("gname".equals(key)){
                currEntry.setGroupName(val);
            } else if ("uid".equals(key)){
                currEntry.setUserId(Integer.parseInt(val));
            } else if ("uname".equals(key)){
                currEntry.setUserName(val);
            } else if ("size".equals(key)){
                currEntry.setSize(Long.parseLong(val));
            } else if ("mtime".equals(key)){
                // Pax mtime is seconds (possibly fractional) since the epoch;
                // the entry stores milliseconds.
                currEntry.setModTime((long) (Double.parseDouble(val) * 1000));
            } else if ("SCHILY.devminor".equals(key)){
                currEntry.setDevMinor(Integer.parseInt(val));
            } else if ("SCHILY.devmajor".equals(key)){
                currEntry.setDevMajor(Integer.parseInt(val));
            }
            // Unknown keywords are silently ignored.
        }
    }

    /**
     * Adds the sparse chunks from the current entry to the sparse chunks,
     * including any additional sparse entries following the current entry.
     *
     * @throws IOException on error
     *
     * @todo Sparse files get not yet really processed.
     */
    private void readGNUSparse() throws IOException {
        /* we do not really process sparse files yet
        sparses = new ArrayList();
        sparses.addAll(currEntry.getSparses());
        */
        if (currEntry.isExtended()) {
            // Consume the chain of extension headers so the stream is left
            // positioned at the entry's data.
            TarArchiveSparseEntry entry;
            do {
                byte[] headerBuf = getRecord();
                if (headerBuf == null) {
                    currEntry = null;
                    break;
                }
                entry = new TarArchiveSparseEntry(headerBuf);
                /* we do not really process sparse files yet
                sparses.addAll(entry.getSparses());
                */
            } while (entry.isExtended());
        }
    }

    /**
     * Returns the next Archive Entry in this Stream.
     *
     * @return the next entry,
     *         or {@code null} if there are no more entries
     * @throws IOException if the next entry could not be read
     */
    @Override
    public ArchiveEntry getNextEntry() throws IOException {
        return getNextTarEntry();
    }

    /**
     * Tries to read the next record rewinding the stream if it is not a EOF record.
     *
     * <p>This is meant to protect against cases where a tar
     * implementation has written only one EOF record when two are
     * expected. Actually this won't help since a non-conforming
     * implementation likely won't fill full blocks consisting of - by
     * default - ten records either so we probably have already read
     * beyond the archive anyway.</p>
     */
    private void tryToConsumeSecondEOFRecord() throws IOException {
        boolean shouldReset = true;
        boolean marked = is.markSupported();
        if (marked) {
            is.mark(recordSize);
        }
        try {
            shouldReset = !isEOFRecord(readRecord());
        } finally {
            // Only rewind when the record was NOT a second EOF record and
            // the stream supports rewinding at all.
            if (shouldReset && marked) {
                pushedBackBytes(recordSize);
                is.reset();
            }
        }
    }

    /**
     * Reads bytes from the current tar archive entry.
     *
     * This method is aware of the boundaries of the current
     * entry in the archive and will deal with them as if they
     * were this stream's start and EOF.
     *
     * @param buf The buffer into which to place bytes read.
     * @param offset The offset at which to place bytes read.
     * @param numToRead The number of bytes to read.
     * @return The number of bytes read, or -1 at EOF.
     * @throws IOException on error
     */
    @Override
    public int read(byte[] buf, int offset, int numToRead) throws IOException {
        int totalRead = 0;

        if (hasHitEOF || entryOffset >= entrySize) {
            return -1;
        }

        // Clamp the request so we never read past the current entry.
        numToRead = Math.min(numToRead, available());

        totalRead = is.read(buf, offset, numToRead);
        // NOTE(review): count() is also invoked with -1 on EOF; presumably the
        // superclass ignores negative values — confirm in ArchiveInputStream.
        count(totalRead);

        if (totalRead == -1) {
            // Underlying stream ended before the entry's declared size.
            hasHitEOF = true;
        } else {
            entryOffset += totalRead;
        }

        return totalRead;
    }

    /**
     * Whether this class is able to read the given entry.
* * <p>May return false if the current entry is a sparse file.</p> */ @Override public boolean canReadEntryData(ArchiveEntry ae) { if (ae instanceof TarArchiveEntry) { TarArchiveEntry te = (TarArchiveEntry) ae; return !te.isGNUSparse(); } return false; } /** * Get the current TAR Archive Entry that this input stream is processing * * @return The current Archive Entry */ public TarArchiveEntry getCurrentEntry() { return currEntry; } protected final void setCurrentEntry(TarArchiveEntry e) { currEntry = e; } protected final boolean isAtEOF() { return hasHitEOF; } protected final void setAtEOF(boolean b) { hasHitEOF = b; } /** * This method is invoked once the end of the archive is hit, it * tries to consume the remaining bytes under the assumption that * the tool creating this archive has padded the last block. */ private void consumeRemainderOfLastBlock() throws IOException { long bytesReadOfLastBlock = getBytesRead() % blockSize; if (bytesReadOfLastBlock > 0) { long skipped = IOUtils.skip(is, blockSize - bytesReadOfLastBlock); count(skipped); } } /** * Checks if the signature matches what is expected for a tar file. 
* * @param signature * the bytes to check * @param length * the number of bytes to check * @return true, if this stream is a tar archive stream, false otherwise */ public static boolean matches(byte[] signature, int length) { if (length < TarConstants.VERSION_OFFSET+TarConstants.VERSIONLEN) { return false; } if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_POSIX, signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) && ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_POSIX, signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) ){ return true; } if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_GNU, signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) && ( ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_GNU_SPACE, signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) || ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_GNU_ZERO, signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) ) ){ return true; } // COMPRESS-107 - recognise Ant tar files if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_ANT, signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) && ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_ANT, signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) ){ return true; } return false; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.calcite.rules.views; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import org.apache.calcite.adapter.druid.DruidQuery; import org.apache.calcite.interpreter.BindableConvention; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptMaterialization; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgramBuilder; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelVisitor; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.Aggregate.Group; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Filter; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import 
org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.hadoop.hive.common.ValidTxnWriteIdList;
import org.apache.hadoop.hive.common.ValidWriteIdList;
import org.apache.hadoop.hive.metastore.api.CreationMetadata;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.lockmgr.HiveTxnManager;
import org.apache.hadoop.hive.ql.lockmgr.LockException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveRelOptMaterialization;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelFactories;
import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveGroupingID;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;
import org.apache.hadoop.hive.ql.parse.DruidSqlOperatorConverter;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hive.common.util.TxnIdUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Static helper methods for working with materialized views in the
 * Calcite-based optimizer: extracting the backing Hive table, checking
 * whether a view is outdated, and rewriting materialization plans.
 */
public class HiveMaterializedViewUtils {

  private static final Logger LOG = LoggerFactory.getLogger(HiveMaterializedViewUtils.class);

  // Utility class; not meant to be instantiated.
  private HiveMaterializedViewUtils() {}

  /**
   * Returns the Hive table metadata backing the given materialization,
   * unwrapping the nullability Project that may sit on top of the scan.
   */
  public static Table extractTable(RelOptMaterialization materialization) {
    RelOptHiveTable cachedMaterializedViewTable;
    if (materialization.tableRel instanceof Project) {
      // There is a Project on top (due to nullability)
      cachedMaterializedViewTable =
          (RelOptHiveTable) materialization.tableRel.getInput(0).getTable();
    } else {
      cachedMaterializedViewTable = (RelOptHiveTable) materialization.tableRel.getTable();
    }
    return cachedMaterializedViewTable.getHiveTableMD();
  }

  /**
   * Utility method that returns whether a materialized view is outdated (true), not outdated
   * (false), or it cannot be determined (null). The latest case may happen e.g. when the
   * materialized view definition uses external tables.
   */
  public static Boolean isOutdatedMaterializedView(
      String validTxnsList, HiveTxnManager txnMgr, List<String> tablesUsed,
      Table materializedViewTable) throws LockException {
    ValidTxnWriteIdList currentTxnWriteIds = txnMgr.getValidWriteIds(tablesUsed, validTxnsList);
    if (currentTxnWriteIds == null) {
      LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
          " ignored for rewriting as we could not obtain current txn ids");
      return null;
    }

    CreationMetadata creationMetadata = materializedViewTable.getCreationMetadata();
    if (creationMetadata.getValidTxnList() == null ||
        creationMetadata.getValidTxnList().isEmpty()) {
      LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
          " ignored for rewriting as we could not obtain materialization txn ids");
      return null;
    }

    boolean ignore = false;
    ValidTxnWriteIdList mvTxnWriteIds = new ValidTxnWriteIdList(
        creationMetadata.getValidTxnList());
    for (String qName : tablesUsed) {
      // Note. If the materialized view does not contain a table that is contained in the query,
      // we do not need to check whether that specific table is outdated or not. If a rewriting
      // is produced in those cases, it is because that additional table is joined with the
      // existing tables with an append-columns only join, i.e., PK-FK + not null.
      if (!creationMetadata.getTablesUsed().contains(qName)) {
        continue;
      }
      ValidWriteIdList tableCurrentWriteIds = currentTxnWriteIds.getTableValidWriteIdList(qName);
      if (tableCurrentWriteIds == null) {
        // Uses non-transactional table, cannot be considered
        LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
            " ignored for rewriting as it is outdated and cannot be considered for " +
            " rewriting because it uses non-transactional table " + qName);
        ignore = true;
        break;
      }
      ValidWriteIdList tableWriteIds = mvTxnWriteIds.getTableValidWriteIdList(qName);
      if (tableWriteIds == null) {
        // This should not happen, but we ignore for safety
        LOG.warn("Materialized view " + materializedViewTable.getFullyQualifiedName() +
            " ignored for rewriting as details about txn ids for table " + qName +
            " could not be found in " + mvTxnWriteIds);
        ignore = true;
        break;
      }
      if (!TxnIdUtils.checkEquivalentWriteIds(tableCurrentWriteIds, tableWriteIds)) {
        LOG.debug("Materialized view " + materializedViewTable.getFullyQualifiedName() +
            " contents are outdated");
        return true;
      }
    }
    // "ignore" means we could not decide; distinguish it from "not outdated".
    if (ignore) {
      return null;
    }
    return false;
  }

  /**
   * Method to enrich the materialization query contained in the input with
   * its invalidation.
   */
  public static HiveRelOptMaterialization augmentMaterializationWithTimeInformation(
      HiveRelOptMaterialization materialization, String validTxnsList,
      ValidTxnWriteIdList materializationTxnList) throws LockException {
    // Extract tables used by the query which will in turn be used to generate
    // the corresponding txn write ids
    List<String> tablesUsed = new ArrayList<>();
    new RelVisitor() {
      @Override
      public void visit(RelNode node, int ordinal, RelNode parent) {
        if (node instanceof TableScan) {
          TableScan ts = (TableScan) node;
          tablesUsed.add(((RelOptHiveTable) ts.getTable()).getHiveTableMD().getFullyQualifiedName());
        }
        super.visit(node, ordinal, parent);
      }
    }.go(materialization.queryRel);
    ValidTxnWriteIdList currentTxnList =
        SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList);
    // Augment the query plan by applying HiveAugmentMaterializationRule via a
    // one-rule HEP program.
    final RexBuilder rexBuilder = materialization.queryRel.getCluster().getRexBuilder();
    final HepProgramBuilder augmentMaterializationProgram = new HepProgramBuilder()
        .addRuleInstance(new HiveAugmentMaterializationRule(rexBuilder, currentTxnList, materializationTxnList));
    final HepPlanner augmentMaterializationPlanner = new HepPlanner(
        augmentMaterializationProgram.build());
    augmentMaterializationPlanner.setRoot(materialization.queryRel);
    final RelNode modifiedQueryRel = augmentMaterializationPlanner.findBestExp();
    return new HiveRelOptMaterialization(materialization.tableRel, modifiedQueryRel,
        null, materialization.qualifiedTableName, materialization.getScope(),
        materialization.getRebuildMode());
  }

  /**
   * If a materialization does not contain grouping sets, it returns the materialization
   * itself. Otherwise, it will create one materialization for each grouping set.
   * For each grouping set, the query for the materialization will consist of the group by
   * columns in the grouping set, followed by a projection to recreate the literal null
   * values.
 The view scan will consist of the scan over the materialization followed by a
   * filter on the grouping id value corresponding to that grouping set.
   */
  public static List<HiveRelOptMaterialization> deriveGroupingSetsMaterializedViews(
      HiveRelOptMaterialization materialization) {
    final RelNode query = materialization.queryRel;
    final Project project;
    final Aggregate aggregate;
    // The MV query is either Aggregate or Project(Aggregate); anything else
    // is returned unchanged below.
    if (query instanceof Aggregate) {
      project = null;
      aggregate = (Aggregate) query;
    } else if (query instanceof Project && query.getInput(0) instanceof Aggregate) {
      project = (Project) query;
      aggregate = (Aggregate) query.getInput(0);
    } else {
      project = null;
      aggregate = null;
    }
    if (aggregate == null) {
      // Not an aggregate materialized view, return original materialization
      return Collections.singletonList(materialization);
    }
    if (aggregate.getGroupType() == Group.SIMPLE) {
      // Not a grouping sets materialized view, return original materialization
      return Collections.singletonList(materialization);
    }
    // Locate the GROUPING__ID aggregate call; its output column index is
    // groupCount + position within the agg call list.
    int aggregateGroupingIdIndex = -1;
    for (int i = 0; i < aggregate.getAggCallList().size(); i++) {
      if (aggregate.getAggCallList().get(i).getAggregation() == HiveGroupingID.INSTANCE) {
        aggregateGroupingIdIndex = aggregate.getGroupCount() + i;
        break;
      }
    }
    Preconditions.checkState(aggregateGroupingIdIndex != -1);
    // If a Project sits on top, the grouping id must be forwarded by it.
    int projectGroupingIdIndex = -1;
    if (project != null) {
      for (int i = 0; i < project.getProjects().size(); i++) {
        RexNode expr = project.getProjects().get(i);
        if (expr instanceof RexInputRef) {
          RexInputRef ref = (RexInputRef) expr;
          if (ref.getIndex() == aggregateGroupingIdIndex) {
            // Grouping id is present
            projectGroupingIdIndex = i;
            break;
          }
        }
      }
      if (projectGroupingIdIndex == -1) {
        // Grouping id is not present, return original materialization
        return Collections.singletonList(materialization);
      }
    }
    // Create multiple materializations
    final List<HiveRelOptMaterialization> materializationList = new ArrayList<>();
    final RelBuilder builder = HiveRelFactories.HIVE_BUILDER.create(aggregate.getCluster(), null);
    final RexBuilder rexBuilder = aggregate.getCluster().getRexBuilder();
    final List<AggregateCall> aggregateCalls = new ArrayList<>(aggregate.getAggCallList());
    // Drop the GROUPING__ID call; each derived MV re-adds it as a literal.
    aggregateCalls.remove(aggregateGroupingIdIndex - aggregate.getGroupCount());
    for (ImmutableBitSet targetGroupSet : aggregate.getGroupSets()) {
      // Compute the grouping id value
      long groupingIdValue = convert(targetGroupSet, aggregate.getGroupSet());
      // First we modify the MV query
      Aggregate newAggregate = aggregate.copy(
          aggregate.getTraitSet(), aggregate.getInput(), targetGroupSet,
          null, aggregateCalls);
      builder.push(newAggregate);
      List<RexNode> exprs = new ArrayList<>();
      for (int pos = 0; pos < aggregate.getGroupCount(); pos++) {
        int ref = aggregate.getGroupSet().nth(pos);
        if (targetGroupSet.get(ref)) {
          exprs.add(
              rexBuilder.makeInputRef(
                  newAggregate, targetGroupSet.indexOf(ref)));
        } else {
          // Column not in this grouping set: recreate the literal NULL that
          // the original grouping-sets aggregate would have produced.
          exprs.add(
              rexBuilder.makeNullLiteral(
                  aggregate.getRowType().getFieldList().get(pos).getType()));
        }
      }
      int pos = targetGroupSet.cardinality();
      for (AggregateCall aggregateCall : aggregate.getAggCallList()) {
        if (aggregateCall.getAggregation() == HiveGroupingID.INSTANCE) {
          // Replace GROUPING__ID with this grouping set's constant value.
          exprs.add(
              rexBuilder.makeBigintLiteral(new BigDecimal(groupingIdValue)));
        } else {
          exprs.add(
              rexBuilder.makeInputRef(newAggregate, pos++));
        }
      }
      if (project != null) {
        // Include projections from top operator
        Project bottomProject = (Project) builder
            .project(exprs, ImmutableList.of(), true)
            .build();
        List<RexNode> newNodes =
            RelOptUtil.pushPastProject(project.getProjects(), bottomProject);
        builder.push(bottomProject.getInput())
            .project(newNodes);
      } else {
        builder.project(exprs);
      }
      final RelNode newQueryRel = builder.build();
      // Second we modify the MV scan: filter on the grouping id column so the
      // derived MV only matches rows of this grouping set.
      builder.push(materialization.tableRel);
      RexNode condition = rexBuilder.makeCall(SqlStdOperatorTable.EQUALS,
          rexBuilder.makeInputRef(materialization.tableRel,
              project != null ? projectGroupingIdIndex : aggregateGroupingIdIndex),
          rexBuilder.makeBigintLiteral(new BigDecimal(groupingIdValue)));
      builder.filter(condition);
      final RelNode newTableRel = builder.build();
      final Table scanTable = extractTable(materialization);
      materializationList.add(
          new HiveRelOptMaterialization(newTableRel, newQueryRel, null,
              ImmutableList.of(scanTable.getDbName(), scanTable.getTableName(),
                  "#" + materializationList.size()),
              materialization.getScope(), materialization.getRebuildMode()));
    }
    return materializationList;
  }

  /**
   * Computes the results of the grouping function given the grouping set and the
   * group by columns.
   */
  private static long convert(ImmutableBitSet targetGroupSet, ImmutableBitSet groupSet) {
    // Bit i (from the most significant end) is 1 when the i-th group-by column
    // is NOT part of the target grouping set, matching GROUPING__ID semantics.
    long value = 0L;
    for (int i = 0; i < groupSet.length(); ++i) {
      int bit = groupSet.nth(i);
      value += targetGroupSet.get(bit) ? 0L : (1L << groupSet.length() - i - 1);
    }
    return value;
  }

  /**
   * Method that will recreate the plan rooted at node using the cluster given
   * as a parameter.
*/
public static RelNode copyNodeNewCluster(RelOptCluster optCluster, RelNode node) {
  // Only Filter and Project nodes are copied structurally (recursing into their
  // inputs); any other node is assumed to be a scan and is recreated by
  // copyNodeScanNewCluster.
  if (node instanceof Filter) {
    final Filter f = (Filter) node;
    return new HiveFilter(optCluster, f.getTraitSet(),
        copyNodeNewCluster(optCluster, f.getInput()), f.getCondition());
  } else if (node instanceof Project) {
    final Project p = (Project) node;
    return HiveProject.create(optCluster,
        copyNodeNewCluster(optCluster, p.getInput()),
        p.getProjects(), p.getRowType(), Collections.emptyList());
  } else {
    return copyNodeScanNewCluster(optCluster, node);
  }
}

/**
 * Validate if given materialized view has SELECT privileges for current user
 * @param cachedMVTableList materialized view tables to check
 * @return false if user does not have privilege otherwise true
 * @throws HiveException on any authorization error other than access denial
 */
public static boolean checkPrivilegeForMaterializedViews(List<Table> cachedMVTableList)
    throws HiveException {
  List<HivePrivilegeObject> privObjects = new ArrayList<HivePrivilegeObject>();
  // One privilege object per MV table, covering every column of the table.
  for (Table cachedMVTable : cachedMVTableList) {
    List<String> colNames = cachedMVTable.getAllCols().stream()
        .map(FieldSchema::getName)
        .collect(Collectors.toList());
    HivePrivilegeObject privObject = new HivePrivilegeObject(
        cachedMVTable.getDbName(), cachedMVTable.getTableName(), colNames);
    privObjects.add(privObject);
  }
  try {
    // NOTE(review): privObjects is passed as both the input and output object
    // lists — confirm this matches the authorizer's contract.
    SessionState.get().getAuthorizerV2().checkPrivileges(HiveOperationType.QUERY,
        privObjects, privObjects, new HiveAuthzContext.Builder().build());
  } catch (HiveException e) {
    // Access-control denial means "no privilege"; any other HiveException is
    // a genuine failure and is rethrown.
    if (e instanceof HiveAccessControlException) {
      return false;
    }
    throw e;
  }
  return true;
}

// Recreates a scan node (DruidQuery or HiveTableScan) against the new cluster.
private static RelNode copyNodeScanNewCluster(RelOptCluster optCluster, RelNode scan) {
  final RelNode newScan;
  if (scan instanceof DruidQuery) {
    final DruidQuery dq = (DruidQuery) scan;
    // Ideally we should use HiveRelNode convention. However, since Volcano planner
    // throws in that case because DruidQuery does not implement the interface,
    // we set it as Bindable. Currently, we do not use convention in Hive, hence that
    // should be fine.
    // TODO: If we want to make use of convention (e.g., while directly generating operator
    // tree instead of AST), this should be changed.
    newScan = DruidQuery.create(optCluster,
        optCluster.traitSetOf(BindableConvention.INSTANCE),
        scan.getTable(), dq.getDruidTable(),
        ImmutableList.of(dq.getTableScan()),
        DruidSqlOperatorConverter.getDefaultMap());
  } else {
    // Plain Hive table scan: rebuild against the new cluster with Hive convention.
    newScan = new HiveTableScan(optCluster,
        optCluster.traitSetOf(HiveRelNode.CONVENTION),
        (RelOptHiveTable) scan.getTable(),
        ((RelOptHiveTable) scan.getTable()).getName(), null, false, false);
  }
  return newScan;
}
}
/******************************************************************************* * Copyright SemanticBits, Northwestern University and Akaza Research * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/caaers/LICENSE.txt for details. ******************************************************************************/ package gov.nih.nci.cabig.caaers.api; import gov.nih.nci.cabig.caaers.domain.ExpeditedAdverseEventReport; import gov.nih.nci.cabig.caaers.domain.ReportFormatType; import gov.nih.nci.cabig.caaers.domain.expeditedfields.ExpeditedReportTree; import gov.nih.nci.cabig.caaers.domain.report.Report; import gov.nih.nci.cabig.caaers.service.EvaluationService; import gov.nih.nci.cabig.caaers.utils.XsltTransformer; import java.io.BufferedReader; import java.io.FileReader; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import java.util.*; /** * @author Ion C. Olaru * * */ public class AdeersReportGenerator extends BasePDFGenerator { protected final Log log = LogFactory.getLog(getClass()); // TO-DO set in spring config private String xmlXsltFile = "xslt/Caaers2Adeers-xml-AEReport.xslt"; private String xslFOXsltFile = "xslt/Caaers2Adeers-pdf-AEReport.xslt"; private String xslFOMedWatchXsltFile = "xslt/Caaers2Medwatch-pdf-AEReport.xslt"; private String xslFODCPXsltFile = "xslt/Caaers2DCP-pdf-SAEForm.xslt"; private String xslFOCIOMSTypeFormXsltFile = "xslt/Caaers2CIOMS-pdf-TypeForm.xslt"; private String xslFOCIOMSXsltFile = "xslt/Caaers2CIOMS-pdf.xslt"; private String xslFOCustomXsltFile = "xslt/CaaersCustom.xslt"; private String xslE2BXsltFile = "xslt/CaaersToE2b.xslt"; // private String xslFOCustomXsltFile = "/SB/caAERS/trunk/caAERS/software/core/src/main/resources/xslt/CaaersCustom.xslt"; protected AdverseEventReportSerializer adverseEventReportSerializer; protected EvaluationService evaluationService; public void generatePdf(String adverseEventReportXml, String pdfOutFileName) throws Exception { 
XsltTransformer xsltTrans = new XsltTransformer(); xsltTrans.toPdf(adverseEventReportXml, pdfOutFileName, xslFOXsltFile); } public List<String> generateImage(String adverseEventReportXml, String pngOutFileName) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); return xsltTrans.toImage(adverseEventReportXml, pngOutFileName, xslFOXsltFile); } public List<String> generateImage(String adverseEventReportXml, String pngOutFileName, Report report) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); String xsltFile = getXSLTForReportFormatType(report); return xsltTrans.toImage(adverseEventReportXml, pngOutFileName, xsltFile); } public void generateDcpSaeForm(String adverseEventReportXml, String pdfOutFileName) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); xsltTrans.toPdf(adverseEventReportXml, pdfOutFileName, xslFODCPXsltFile); } public void generateCIOMSTypeForm(String adverseEventReportXml, String pdfOutFileName) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); xsltTrans.toPdf(adverseEventReportXml, pdfOutFileName, xslFOCIOMSTypeFormXsltFile); } /* * This method generated the PDF file based on the given XML & XSL * * @author Ion C . 
Olaru * @param adverseEventReportXml Serialized xml content * @param pdfOutFileName The generated PDF file path * * */ public void generateCustomPDF(String adverseEventReportXml, String pdfOutFileName) throws Exception { generatePdf(adverseEventReportXml, pdfOutFileName, xslFOCustomXsltFile); } public void generateCIOMS(String adverseEventReportXml, String pdfOutFileName) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); xsltTrans.toPdf(adverseEventReportXml, pdfOutFileName, xslFOCIOMSXsltFile); } public void generateMedwatchPdf(String adverseEventReportXml, String pdfOutFileName) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); xsltTrans.toPdf(adverseEventReportXml, pdfOutFileName, xslFOMedWatchXsltFile); } public String generateE2BXml(String adverseEventReportXml) throws Exception { XsltTransformer xsltTrans = new XsltTransformer(); return xsltTrans.toText(adverseEventReportXml, xslE2BXsltFile); } /** * This method will generate the caAERS internal xml representation of the report. * @param aeReport - A data collection * @param report - A report */ public String generateCaaersXml(ExpeditedAdverseEventReport aeReport,Report report) throws Exception{ evaluationService.evaluateMandatoryness(aeReport, report); return adverseEventReportSerializer.serialize(aeReport, report); } public String generateCaaersWithdrawXml(ExpeditedAdverseEventReport aeReport,Report report) throws Exception{ return adverseEventReportSerializer.serializeWithdrawXML(aeReport,report ); } /** * This method will generate the PDF file and store it in the file system and return its path. 
* @param report * @param caaersXml * @return * @throws Exception */ public String[] generateExternalReports(Report report, String caaersXml, int reportIdOrReportVersionId) throws Exception { assert report != null; ReportFormatType formatType = report.getReportDefinition().getReportFormatType(); String pdfOutFile = System.getProperty("java.io.tmpdir"); switch (formatType) { case DCPSAEFORM: pdfOutFile += "/dcpSAEForm-" + reportIdOrReportVersionId + ".pdf"; this.generateDcpSaeForm(caaersXml, pdfOutFile); break; case MEDWATCHPDF: pdfOutFile += "/medWatchReport-" + reportIdOrReportVersionId + ".pdf"; this.generateMedwatchPdf(caaersXml, pdfOutFile); break; case CIOMSFORM: pdfOutFile += "/CIOMSForm-" + reportIdOrReportVersionId + ".pdf"; this.generateCIOMS(caaersXml, pdfOutFile); break; case CIOMSSAEFORM: pdfOutFile += "/CIOMS-SAE-Form-" + reportIdOrReportVersionId + ".pdf"; this.generateCIOMSTypeForm(caaersXml, pdfOutFile); break; case CUSTOM_REPORT: pdfOutFile += "/CustomReport-" + reportIdOrReportVersionId + ".pdf"; this.generateCustomPDF(caaersXml, pdfOutFile); break; default: //adders pdfOutFile += "/expeditedAdverseEventReport-" + reportIdOrReportVersionId + ".pdf"; generatePdf(caaersXml, pdfOutFile); break; } return new String[] { pdfOutFile }; } public String getXSLTForReportFormatType(Report report) throws Exception { assert report != null; ReportFormatType formatType = report.getReportDefinition().getReportFormatType(); String xslTFile = xslFOXsltFile; switch (formatType) { case DCPSAEFORM: xslTFile = xslFODCPXsltFile; break; case MEDWATCHPDF: xslTFile = xslFOMedWatchXsltFile; break; case CIOMSFORM: xslTFile = xslFOCIOMSXsltFile; break; case CIOMSSAEFORM: xslTFile = xslFOCIOMSTypeFormXsltFile; break; case CUSTOM_REPORT: xslTFile = xslFOCustomXsltFile; break; case E2BXML: xslTFile = xslE2BXsltFile; default: //adders xslTFile = xslFOXsltFile; break; } return xslTFile; } ///OBJECT PROPERTIES public void setAdverseEventReportSerializer(AdverseEventReportSerializer 
adverseEventReportSerializer) { this.adverseEventReportSerializer = adverseEventReportSerializer; } public void setEvaluationService(EvaluationService evaluationService) { this.evaluationService = evaluationService; } /** * This method is testting the PDF generation for the given XML & XSL file * * @author Ion C. Olaru * @return generate the File */ public static void createCustomPDFTest() { String XMLFile = "/home/dell/Downloads/expeditedAdverseEventReport-335.xml"; String PDFFile = "/home/dell/Desktop/testAEReport.pdf"; AdeersReportGenerator g = new AdeersReportGenerator(); StringBuffer s = new StringBuffer(""); try { FileReader input = new FileReader(XMLFile); BufferedReader bufRead = new BufferedReader(input); String line = bufRead.readLine(); while (line != null) { s.append(line); line = bufRead.readLine(); } String xml = s.toString(); g.generateCustomPDF(xml, PDFFile); } catch (Exception e) { e.printStackTrace(); } } public static void main(String[] args) { createMedwatchPDFTest(); } public static void createMedwatchPDFTest() { String str1 = ""; try { // String file = "C:\\vin\\caAERS\\caAERS\\software\\expeditedAdverseEventReport-416.xml"; // String pdf = "C:\\vin\\caAERS\\caAERS\\software\\expeditedAdverseEventReport-416.pdf"; // String file = "C:\\vin\\caAERS\\caAERS\\software\\med1.xml"; // String pdf = "C:\\vin\\caAERS\\caAERS\\software\\med1.pdf"; String file = "C:\\vin\\caAERS\\tmp\\sample-msgs\\expeditedAdverseEventReport-374.xml"; String pdf = "C:\\vin\\caAERS\\tmp\\sample-msgs\\expeditedAdverseEventReport-374.pdf"; AdeersReportGenerator aeg = new AdeersReportGenerator(); FileReader input = new FileReader(file); BufferedReader bufRead = new BufferedReader(input); String line = bufRead.readLine(); while (line != null) { str1 = str1 + line; line = bufRead.readLine(); } aeg.generateMedwatchPdf(str1, pdf); // aeg.generateMedwatchPdf(str1, "C:\\medwatch-2.pdf"); } catch (Exception e) { e.printStackTrace(); } } }
package com.xqbase.metric;

import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Function;
import java.util.function.ToDoubleFunction;
import java.util.stream.DoubleStream;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.json.JSONObject;

import com.xqbase.metric.common.MetricValue;
import com.xqbase.metric.util.CollectionsEx;
import com.xqbase.util.Conf;
import com.xqbase.util.Log;
import com.xqbase.util.Numbers;
import com.xqbase.util.Strings;
import com.xqbase.util.Time;
import com.xqbase.util.db.ConnectionPool;
import com.xqbase.util.db.Row;
import com.xqbase.util.function.ConsumerEx;

// Composite HashMap key: (tag value, time-bucket index) used while aggregating
// metric rows per group and per time slot.
class GroupKey {
	String tag;
	int index;

	GroupKey(String tag, int index) {
		this.tag = tag;
		this.index = index;
	}

	@Override
	public boolean equals(Object obj) {
		// NOTE(review): no null/instanceof check — throws ClassCastException if
		// compared with a non-GroupKey. Safe only because this key is used
		// exclusively in a HashMap whose keys are all GroupKey; confirm before
		// widening its use.
		GroupKey key = (GroupKey) obj;
		return index == key.index && tag.equals(key.tag);
	}

	@Override
	public int hashCode() {
		return tag.hashCode() * 31 + index;
	}
}

/**
 * HTTP API serving aggregated dashboard metrics. URL shape:
 * {@code /<metricName>/<method>} where method is one of count/sum/max/min/avg/std,
 * or the pseudo-methods "names" (list metric names) and "tags" (list a metric's tags).
 * Time-range filtering is done in SQL; tag filtering and grouping in Java.
 */
public class DashboardApi extends HttpServlet {
	private static final long serialVersionUID = 1L;

	private static final String QUERY_NAMES = "SELECT name FROM metric_name";
	private static final String QUERY_TAGS =
			"SELECT tags FROM metric_name WHERE name = ?";
	private static final String QUERY_ID =
			"SELECT id FROM metric_name WHERE name = ?";
	private static final String AGGREGATE_MINUTE =
			"SELECT time, metrics FROM metric_minute WHERE id = ? AND time >= ? AND time <= ?";
	private static final String AGGREGATE_QUARTER =
			"SELECT time, metrics FROM metric_quarter WHERE id = ? AND time >= ? AND time <= ?";

	// 0 disables the "top N tag values" truncation of responses.
	private int maxTagValues = 0;
	private ConnectionPool db = null;

	/** Loads JDBC settings from "jdbc" conf and dashboard limits from "Dashboard" conf. */
	@Override
	public void init() throws ServletException {
		try {
			Properties p = Conf.load("jdbc");
			Driver driver = (Driver) Class.forName(p.
					getProperty("driver")).newInstance();
			db = new ConnectionPool(driver, p.getProperty("url", ""),
					p.getProperty("user"), p.getProperty("password"));
			maxTagValues = Numbers.parseInt(Conf.
					load("Dashboard").getProperty("max_tag_values"));
		} catch (ReflectiveOperationException e) {
			throw new ServletException(e);
		}
	}

	@Override
	public void destroy() {
		if (db != null) {
			db.close();
		}
	}

	// Parses a double, mapping NaN/Infinity (and parse failures) to 0.
	private static double __(String s) {
		double d = Numbers.parseDouble(s);
		return Double.isFinite(d) ? d : 0;
	}

	// Present only when the PostgreSQL driver is on the classpath; used to decide
	// whether the autocommit workaround in doGet() applies.
	private static Class<?> pgConnection = null;
	private static Map<String, ToDoubleFunction<MetricValue>>
			methodMap = new HashMap<>();
	// Sentinel "methods": compared by identity in doGet() to trigger the
	// names/tags listing paths instead of a numeric aggregation.
	private static final ToDoubleFunction<MetricValue> NAMES_METHOD = value -> 0;
	private static final ToDoubleFunction<MetricValue> TAGS_METHOD = value -> 1;

	static {
		try {
			pgConnection = Class.forName("org.postgresql.jdbc.PgConnection");
		} catch (ClassNotFoundException e) {
			// Ignored
		}
		methodMap.put("count", MetricValue::getCount);
		methodMap.put("sum", MetricValue::getSum);
		methodMap.put("max", MetricValue::getMax);
		methodMap.put("min", MetricValue::getMin);
		methodMap.put("avg", MetricValue::getAvg);
		methodMap.put("std", MetricValue::getStd);
		methodMap.put("names", NAMES_METHOD);
		methodMap.put("tags", TAGS_METHOD);
	}

	private static void error400(HttpServletResponse resp) {
		try {
			resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
		} catch (IOException e) {/**/}
	}

	private static void error500(HttpServletResponse resp, Throwable e) {
		Log.e(e);
		try {
			resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
		} catch (IOException e_) {/**/}
	}

	// Copies a request header into a (differently named) response header, if present.
	private static void copyHeader(HttpServletRequest req,
			HttpServletResponse resp, String reqHeader, String respHeader) {
		String value = req.getHeader(reqHeader);
		if (value != null) {
			resp.setHeader(respHeader, value);
		}
	}

	// Writes data as JSON (with CORS headers) or as a JSONP callback invocation
	// when the "_callback" parameter is present.
	private static void outputJson(HttpServletRequest req,
			HttpServletResponse resp, Object data) {
		resp.setCharacterEncoding("UTF-8");
		PrintWriter out;
		try {
			out = resp.getWriter();
		} catch (IOException e) {
			Log.d("" + e);
			return;
		}
		// Strings are assumed to already be serialized JSON and are emitted as-is.
		String json = (data instanceof String ? (String) data :
				JSONObject.valueToString(data));
		String callback = req.getParameter("_callback");
		if (callback == null) {
			copyHeader(req, resp, "Origin", "Access-Control-Allow-Origin");
			copyHeader(req, resp, "Access-Control-Request-Methods",
					"Access-Control-Allow-Methods");
			copyHeader(req, resp, "Access-Control-Request-Headers",
					"Access-Control-Allow-Headers");
			resp.setHeader("Access-Control-Allow-Credentials", "true");
			resp.setContentType("application/json");
			out.print(json);
		} else {
			resp.setContentType("text/javascript");
			out.print(callback + "(" + json + ");");
		}
	}

	@Override
	protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
		// Find Metric Collection and Aggregation Method
		String path = req.getPathInfo();
		if (path == null) {
			error400(resp);
			return;
		}
		while (!path.isEmpty() && path.charAt(0) == '/') {
			path = path.substring(1);
		}
		int slash = path.indexOf('/');
		if (slash < 0) {
			error400(resp);
			return;
		}
		ToDoubleFunction<MetricValue> method =
				methodMap.get(path.substring(slash + 1));
		if (method == null) {
			error400(resp);
			return;
		}
		// "names" pseudo-method: return the sorted set of all metric names.
		if (method == NAMES_METHOD) {
			try {
				Set<String> names = new TreeSet<>();
				db.queryEx(row -> names.add(row.getString("name")), QUERY_NAMES);
				outputJson(req, resp, names);
			} catch (SQLException e) {
				error500(resp, e);
			}
			return;
		}
		String metricName = path.substring(0, slash);
		// "tags" pseudo-method: return the stored tags JSON for this metric.
		if (method == TAGS_METHOD) {
			Row row;
			try {
				row = db.queryEx(QUERY_TAGS, metricName);
			} catch (SQLException e) {
				error500(resp, e);
				return;
			}
			if (row == null) {
				outputJson(req, resp, "{}");
				return;
			}
			String s = row.getString("tags");
			outputJson(req, resp, s == null ? "{}" : s);
			return;
		}
		// "_quarter." prefix (9 chars) selects the 15-minute table instead of
		// the per-minute one.
		boolean quarter = metricName.startsWith("_quarter.");
		if (quarter) {
			metricName = metricName.substring(9);
		}
		int id;
		try {
			Row row = db.queryEx(QUERY_ID, metricName);
			if (row == null) {
				outputJson(req, resp, "{}");
				return;
			}
			id = row.getInt("id");
		} catch (SQLException e) {
			error500(resp, e);
			return;
		}
		// Query Condition: every non-underscore-prefixed request parameter is a
		// tag filter (exact match).
		Map<String, String> query = new HashMap<>();
		Enumeration<String> names = req.getParameterNames();
		while (names.hasMoreElements()) {
			String name = names.nextElement();
			if (!name.isEmpty() && name.charAt(0) != '_') {
				query.put(name, req.getParameter(name));
			}
		}
		// Other Query Parameters: time window is [begin, end] in table-native
		// time units (minutes or quarter-hours), bucketed by "interval".
		int end = Numbers.parseInt(req.getParameter("_end"),
				(int) (System.currentTimeMillis() /
				(quarter ? Time.MINUTE / 15 : Time.MINUTE)));
		int interval = Numbers.parseInt(req.getParameter("_interval"), 1, 1440);
		int length = Numbers.parseInt(req.getParameter("_length"), 1, 1024);
		int begin = end - interval * length + 1;
		// Missing/empty group tag values fall into the "_" bucket.
		String groupBy_ = req.getParameter("_group_by");
		Function<Map<String, String>, String> groupBy =
				groupBy_ == null ? tags -> "_" : tags -> {
			String value = tags.get(groupBy_);
			return Strings.isEmpty(value) ? "_" : value;
		};
		// Query Time Range by SQL, Query and Group Tags by Java
		Map<GroupKey, MetricValue> result = new HashMap<>();
		ConsumerEx<ResultSet, SQLException> consumer = rs -> {
			int index = (rs.getInt("time") - begin) / interval;
			if (index < 0 || index >= length) {
				Log.w("Key " + rs.getInt("time") + " out of range, end = " +
						end + ", interval = " + interval + ", length = " + length);
				return;
			}
			// Each "metrics" cell holds one line per sample:
			// path0/path1/path2/path3/path4?tag=v&tag=v (url-encoded tags).
			String s = rs.getString("metrics");
			for (String line : s.split("\n")) {
				String[] paths;
				Map<String, String> tags = new HashMap<>();
				int i = line.indexOf('?');
				if (i < 0) {
					paths = line.split("/");
				} else {
					paths = line.substring(0, i).split("/");
					String q = line.substring(i + 1);
					for (String tag : q.split("&")) {
						i = tag.indexOf('=');
						if (i > 0) {
							tags.put(Strings.decodeUrl(tag.substring(0, i)),
									Strings.decodeUrl(tag.substring(i + 1)));
						}
					}
				}
				// Query Tags: drop samples that fail any tag filter, and
				// malformed lines without the 5 expected fields.
				boolean skip = false;
				for (Map.Entry<String, String> entry : query.entrySet()) {
					String value = tags.get(entry.getKey());
					if (!entry.getValue().equals(value)) {
						skip = true;
						break;
					}
				}
				if (skip || paths.length <= 4) {
					continue;
				}
				// Group Tags: merge this sample into its (group, bucket) cell.
				GroupKey key = new GroupKey(groupBy.apply(tags), index);
				MetricValue newValue = new MetricValue(Numbers.parseLong(paths[0]),
						__(paths[1]), __(paths[2]), __(paths[3]), __(paths[4]));
				MetricValue value = result.get(key);
				if (value == null) {
					result.put(key, newValue);
				} else {
					value.add(newValue);
				}
			}
		};
		try (ConnectionPool.Entry entry = db.borrow()) {
			Connection conn = entry.getObject();
			boolean pg = pgConnection != null &&
					pgConnection.isAssignableFrom(conn.getClass());
			// db.query() does not support setAutoCommit
			if (pg) {
				conn.setAutoCommit(false);
			}
			try (PreparedStatement ps = conn.prepareStatement(quarter ?
					AGGREGATE_QUARTER : AGGREGATE_MINUTE)) {
				ps.setInt(1, id);
				ps.setInt(2, begin);
				ps.setInt(3, end);
				try (ResultSet rs = ps.executeQuery()) {
					while (rs.next()) {
						consumer.accept(rs);
					}
				}
			}
			if (pg) {
				conn.setAutoCommit(true);
			}
			entry.setValid(true);
		} catch (SQLException e) {
			error500(resp, e);
			return;
		}
		// Generate Data: one double[length] series per group tag value.
		Map<String, double[]> data = new HashMap<>();
		result.forEach((key, value) -> {
			/* Already Filtered during Grouping
			if (key.index < 0 || key.index >= length) {
				continue;
			} */
			double[] values = data.get(key.tag);
			if (values == null) {
				values = new double[length];
				Arrays.fill(values, 0);
				data.put(key.tag, values);
			}
			double d = method.applyAsDouble(value);
			values[key.index] = Double.isFinite(d) ? d : 0;
		});
		if (maxTagValues > 0 && data.size() > maxTagValues) {
			// Too many groups: keep only the maxTagValues series with the
			// largest sums.
			outputJson(req, resp, CollectionsEx.toMap(CollectionsEx.max(
					data.entrySet(),
					Comparator.comparingDouble(entry ->
					DoubleStream.of((double[]) entry.getValue()).sum()),
					maxTagValues)));
		} else {
			outputJson(req, resp, data);
		}
	}

	@Override
	protected void doPost(HttpServletRequest req, HttpServletResponse resp) {
		doGet(req, resp);
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gora.util; import java.io.ByteArrayOutputStream; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.io.InputStream; import java.io.ObjectInput; import java.io.ObjectInputStream; import java.io.ObjectOutput; import java.io.ObjectOutputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.avro.Schema; import org.apache.avro.io.BinaryDecoder; import org.apache.avro.io.BinaryEncoder; import org.apache.avro.io.Decoder; import org.apache.avro.io.DecoderFactory; import org.apache.avro.io.Encoder; import org.apache.avro.io.EncoderFactory; import org.apache.avro.specific.SpecificDatumReader; import org.apache.avro.specific.SpecificDatumWriter; import org.apache.avro.specific.SpecificRecord; import org.apache.avro.util.ByteBufferInputStream; import org.apache.avro.util.ByteBufferOutputStream; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.DefaultStringifier; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableUtils; import 
org.apache.hadoop.io.serializer.Deserializer; import org.apache.hadoop.io.serializer.SerializationFactory; import org.apache.hadoop.io.serializer.Serializer; /** * An utility class for I/O related functionality. */ public class IOUtils { public static final int BUFFER_SIZE = 8192; private static BinaryDecoder decoder; private static Configuration getOrCreateConf(Configuration conf) { return conf != null ? conf : new Configuration(); } public static Object readObject(DataInput in) throws ClassNotFoundException, IOException { if(in instanceof ObjectInput) { return ((ObjectInput)in).readObject(); } else { if(in instanceof InputStream) { ObjectInput objIn = new ObjectInputStream((InputStream)in); Object obj = objIn.readObject(); return obj; } } throw new IOException("cannot read from DataInput of instance:" + in.getClass()); } public static void writeObject(DataOutput out, Object obj) throws IOException { if(out instanceof ObjectOutput) { ((ObjectOutput)out).writeObject(obj); } else { if(out instanceof OutputStream) { ObjectOutput objOut = new ObjectOutputStream((OutputStream)out); objOut.writeObject(obj); } } throw new IOException("cannot write to DataOutput of instance:" + out.getClass()); } /** Serializes the object to the given dataoutput using * available Hadoop serializations * @throws IOException */ public static<T> void serialize(Configuration conf, DataOutput out , T obj, Class<T> objClass) throws IOException { SerializationFactory serializationFactory = new SerializationFactory(getOrCreateConf(conf)); Serializer<T> serializer = serializationFactory.getSerializer(objClass); try (ByteBufferOutputStream os = new ByteBufferOutputStream()) { serializer.open(os); serializer.serialize(obj); int length = 0; List<ByteBuffer> buffers = os.getBufferList(); for(ByteBuffer buffer : buffers) { length += buffer.limit() - buffer.arrayOffset(); } WritableUtils.writeVInt(out, length); for(ByteBuffer buffer : buffers) { byte[] arr = buffer.array(); out.write(arr, 
buffer.arrayOffset(), buffer.limit()); } }finally { if(serializer != null) serializer.close(); } } /** Serializes the object to the given dataoutput using * available Hadoop serializations * @throws IOException */ @SuppressWarnings("unchecked") public static<T> void serialize(Configuration conf, DataOutput out , T obj) throws IOException { Text.writeString(out, obj.getClass().getName()); serialize(conf, out, obj, (Class<T>)obj.getClass()); } /** Serializes the object to the given dataoutput using * available Hadoop serializations*/ public static<T> byte[] serialize(Configuration conf, T obj) throws IOException { DataOutputBuffer buffer = new DataOutputBuffer(); serialize(conf, buffer, obj); return buffer.getData(); } /** * Serializes the field object using the datumWriter. */ public static<T extends SpecificRecord> void serialize(OutputStream os, SpecificDatumWriter<T> datumWriter, T object) throws IOException { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null); datumWriter.write(object, encoder); encoder.flush(); } /** * Serializes the field object using the datumWriter. */ public static<T> void serialize(OutputStream os, SpecificDatumWriter<T> datumWriter, T object) throws IOException { BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(os, null); datumWriter.write(object, encoder); encoder.flush(); } /** * Serializes the field object using the datumWriter. */ public static<T extends SpecificRecord> byte[] serialize(SpecificDatumWriter<T> datumWriter , T object) throws IOException { ByteArrayOutputStream os = new ByteArrayOutputStream(); serialize(os, datumWriter, object); return os.toByteArray(); } /** * Serializes the field object using the datumWriter. 
*/ public static<T> byte[] serialize(SpecificDatumWriter<T> datumWriter , T object) throws IOException { ByteArrayOutputStream os = new ByteArrayOutputStream(); serialize(os, datumWriter, object); return os.toByteArray(); } /** Deserializes the object in the given datainput using * available Hadoop serializations. * @throws IOException * @throws ClassNotFoundException */ @SuppressWarnings("unchecked") public static<T> T deserialize(Configuration conf, DataInput in , T obj , String objClass) throws IOException, ClassNotFoundException { Class<T> c = (Class<T>) ClassLoadingUtils.loadClass(objClass); return deserialize(conf, in, obj, c); } /** Deserializes the object in the given datainput using * available Hadoop serializations. * @throws IOException */ public static<T> T deserialize(Configuration conf, DataInput in , T obj , Class<T> objClass) throws IOException { SerializationFactory serializationFactory = new SerializationFactory(getOrCreateConf(conf)); Deserializer<T> deserializer = serializationFactory.getDeserializer( objClass); int length = WritableUtils.readVInt(in); byte[] arr = new byte[length]; in.readFully(arr); List<ByteBuffer> list = new ArrayList<>(); list.add(ByteBuffer.wrap(arr)); try (ByteBufferInputStream is = new ByteBufferInputStream(list)) { deserializer.open(is); T newObj = deserializer.deserialize(obj); return newObj; }finally { if(deserializer != null) deserializer.close(); } } /** Deserializes the object in the given datainput using * available Hadoop serializations. * @throws IOException * @throws ClassNotFoundException */ @SuppressWarnings("unchecked") public static<T> T deserialize(Configuration conf, DataInput in , T obj) throws IOException, ClassNotFoundException { String clazz = Text.readString(in); Class<T> c = (Class<T>)ClassLoadingUtils.loadClass(clazz); return deserialize(conf, in, obj, c); } /** Deserializes the object in the given datainput using * available Hadoop serializations. 
* @throws IOException * @throws ClassNotFoundException */ public static<T> T deserialize(Configuration conf, byte[] in , T obj) throws IOException, ClassNotFoundException { DataInputBuffer buffer = new DataInputBuffer(); buffer.reset(in, in.length); return deserialize(conf, buffer, obj); } /** * Deserializes the field object using the datumReader. */ public static<K, T extends SpecificRecord> T deserialize(InputStream is, SpecificDatumReader<T> datumReader, T object) throws IOException { decoder = DecoderFactory.get().binaryDecoder(is, decoder); return datumReader.read(object, decoder); } /** * Deserializes the field object using the datumReader. */ public static<K, T extends SpecificRecord> T deserialize(byte[] bytes, SpecificDatumReader<T> datumReader, T object) throws IOException { decoder = DecoderFactory.get().binaryDecoder(bytes, decoder); return datumReader.read(object, decoder); } /** * Deserializes the field object using the datumReader. */ public static<K, T> T deserialize(byte[] bytes, SpecificDatumReader<T> datumReader, T object) throws IOException { decoder = DecoderFactory.get().binaryDecoder(bytes, decoder); return datumReader.read(object, decoder); } /** * Writes a byte[] to the output, representing whether each given field is null * or not. A Vint and ceil( fields.length / 8 ) bytes are written to the output. * @param out the output to write to * @param fields the fields to check for null * @see #readNullFieldsInfo(DataInput) */ public static void writeNullFieldsInfo(DataOutput out, Object ... fields) throws IOException { boolean[] isNull = new boolean[fields.length]; for(int i=0; i<fields.length; i++) { isNull[i] = (fields[i] == null); } writeBoolArray(out, isNull); } /** * Reads the data written by {@link #writeNullFieldsInfo(DataOutput, Object...)} * and returns a boolean array representing whether each field is null or not. * @param in the input to read from * @return a boolean[] representing whether each field is null or not. 
*/ public static boolean[] readNullFieldsInfo(DataInput in) throws IOException { return readBoolArray(in); } /** * Writes a boolean[] to the output. */ public static void writeBoolArray(DataOutput out, boolean[] boolArray) throws IOException { WritableUtils.writeVInt(out, boolArray.length); byte b = 0; int i = 0; for(i=0; i<boolArray.length; i++) { if(i % 8 == 0 && i != 0) { out.writeByte(b); b = 0; } b >>= 1; if(boolArray[i]) b |= 0x80; else b &= 0x7F; } if(i % 8 != 0) { for(int j=0; j < 8 - (i % 8); j++) { //shift for the remaining byte b >>=1; b &= 0x7F; } } out.writeByte(b); } /** * Reads a boolean[] from input * @throws IOException */ public static boolean[] readBoolArray(DataInput in) throws IOException { int length = WritableUtils.readVInt(in); boolean[] arr = new boolean[length]; byte b = 0; for(int i=0; i < length; i++) { if(i % 8 == 0) { b = in.readByte(); } arr[i] = (b & 0x01) > 0; b >>= 1; } return arr; } /** * Writes a boolean[] to the output. */ public static void writeBoolArray(Encoder out, boolean[] boolArray) throws IOException { out.writeInt(boolArray.length); int byteArrLength = (int)Math.ceil(boolArray.length / 8.0); byte b = 0; byte[] arr = new byte[byteArrLength]; int i = 0; int arrIndex = 0; for(i=0; i<boolArray.length; i++) { if(i % 8 == 0 && i != 0) { arr[arrIndex++] = b; b = 0; } b >>= 1; if(boolArray[i]) b |= 0x80; else b &= 0x7F; } if(i % 8 != 0) { for(int j=0; j < 8 - (i % 8); j++) { //shift for the remaining byte b >>=1; b &= 0x7F; } } arr[arrIndex++] = b; out.writeFixed(arr); } /** * Reads a boolean[] from input * @throws IOException */ public static boolean[] readBoolArray(Decoder in) throws IOException { int length = in.readInt(); boolean[] boolArr = new boolean[length]; int byteArrLength = (int)Math.ceil(length / 8.0); byte[] byteArr = new byte[byteArrLength]; in.readFixed(byteArr); int arrIndex = 0; byte b = 0; for(int i=0; i < length; i++) { if(i % 8 == 0) { b = byteArr[arrIndex++]; } boolArr[i] = (b & 0x01) > 0; b >>= 1; } 
return boolArr; } /** * Writes the String array to the given DataOutput. * @param out the data output to write to * @param arr the array to write * @see #readStringArray(DataInput) */ public static void writeStringArray(DataOutput out, String[] arr) throws IOException { WritableUtils.writeVInt(out, arr.length); for(String str : arr) { Text.writeString(out, str); } } /** * Reads and returns a String array that is written by * {@link #writeStringArray(DataOutput, String[])}. * @param in the data input to read from * @return read String[] */ public static String[] readStringArray(DataInput in) throws IOException { int len = WritableUtils.readVInt(in); String[] arr = new String[len]; for(int i=0; i<len; i++) { arr[i] = Text.readString(in); } return arr; } /** * Stores the given object in the configuration under the given dataKey * @param obj the object to store * @param conf the configuration to store the object into * @param dataKey the key to store the data */ public static<T> void storeToConf(T obj, Configuration conf, String dataKey) throws IOException { String classKey = dataKey + "._class"; conf.set(classKey, obj.getClass().getName()); DefaultStringifier.store(conf, obj, dataKey); } /** * Loads the object stored by {@link #storeToConf(Object, Configuration, String)} * method from the configuration under the given dataKey. 
* @param conf the configuration to read from * @param dataKey the key to get the data from * @return the store object */ @SuppressWarnings("unchecked") public static<T> T loadFromConf(Configuration conf, String dataKey) throws IOException { String classKey = dataKey + "._class"; String className = conf.get(classKey); try { T obj = (T) DefaultStringifier.load(conf, dataKey, ClassLoadingUtils.loadClass(className)); return obj; } catch (Exception ex) { throw new IOException(ex); } } /** * Copies the contents of the buffers into a single byte[] */ //TODO: not tested public static byte[] getAsBytes(List<ByteBuffer> buffers) { //find total size int size = 0; for(ByteBuffer buffer : buffers) { size += buffer.remaining(); } byte[] arr = new byte[size]; int offset = 0; for(ByteBuffer buffer : buffers) { int len = buffer.remaining(); buffer.get(arr, offset, len); offset += len; } return arr; } /** * Reads until the end of the input stream, and returns the contents as a byte[] */ public static byte[] readFully(InputStream in) throws IOException { List<ByteBuffer> buffers = new ArrayList<>(4); while(true) { ByteBuffer buffer = ByteBuffer.allocate(BUFFER_SIZE); int count = in.read(buffer.array(), 0, BUFFER_SIZE); if(count > 0) { buffer.limit(count); buffers.add(buffer); } if(count < BUFFER_SIZE) break; } return getAsBytes(buffers); } }
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program.  If not, see <http://www.gnu.org/licenses/>.

package org.uma.jmetal.operator.impl.crossover;

import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.test.util.ReflectionTestUtils;
import org.uma.jmetal.problem.BinaryProblem;
import org.uma.jmetal.problem.impl.AbstractBinaryProblem;
import org.uma.jmetal.solution.BinarySolution;
import org.uma.jmetal.solution.impl.DefaultBinarySolution;
import org.uma.jmetal.util.JMetalException;
import org.uma.jmetal.util.pseudorandom.JMetalRandom;

import java.util.ArrayList;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@link SinglePointCrossover}. The random generator is replaced
 * with a Mockito mock (injected via reflection into the private
 * {@code randomGenerator} field) so that the crossover decision and cutting-bit
 * position are deterministic in each test.
 */
public class SinglePointCrossoverTest {
  private static final double EPSILON = 0.00000000000001 ;
  // Every variable of the mocked binary problem has this many bits.
  private static final int BITS_OF_MOCKED_BINARY_PROBLEM = 7 ;

  @Test
  public void shouldConstructorAssignTheCorrectProbabilityValue() {
    double crossoverProbability = 0.1 ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    // Probability is private; read it back reflectively.
    assertEquals(crossoverProbability, (Double) ReflectionTestUtils
        .getField(crossover, "crossoverProbability"), EPSILON) ;
  }

  @Test (expected = JMetalException.class)
  public void shouldConstructorFailWhenPassedANegativeProbabilityValue() {
    double crossoverProbability = -0.1 ;
    new SinglePointCrossover(crossoverProbability) ;
  }

  // NOTE(review): method name says "Mutation" but this verifies the CROSSOVER
  // probability getter — consider renaming to shouldGetCrossoverProbability...
  @Test
  public void shouldGetMutationProbabilityReturnTheRightValue() {
    double crossoverProbability = 0.1 ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    assertEquals(crossoverProbability, crossover.getCrossoverProbability(), EPSILON) ;
  }

  @Test (expected = JMetalException.class)
  public void shouldExecuteWithNullParameterThrowAnException() {
    SinglePointCrossover crossover = new SinglePointCrossover(0.1) ;
    crossover.execute(null) ;
  }

  @Test (expected = JMetalException.class)
  public void shouldExecuteFailIfTheListContainsOnlyOneSolution() {
    MockBinaryProblem problem = new MockBinaryProblem(1) ;
    SinglePointCrossover crossover = new SinglePointCrossover(0.1) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(1) ;
    solutions.add(problem.createSolution()) ;
    crossover.execute(solutions) ;
  }

  @Test (expected = JMetalException.class)
  public void shouldExecuteFailIfTheListContainsMoreThanTwoSolutions() {
    MockBinaryProblem problem = new MockBinaryProblem(1) ;
    SinglePointCrossover crossover = new SinglePointCrossover(0.1) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    crossover.execute(solutions) ;
  }

  // nextDouble() (0.02) > crossoverProbability (0.01) => no crossover happens,
  // so both parents must come back unchanged.
  @Test
  public void shouldCrossingTwoVariableSolutionsReturnTheSameSolutionsIfNoBitsAreMutated() {
    int numberOfVariables = 1;
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.01;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.02) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    assertEquals(solutions.get(0), resultSolutions.get(0)) ;
    assertEquals(solutions.get(1), resultSolutions.get(1)) ;
    verify(randomGenerator, times(1)).nextDouble();
  }

  // NOTE(review): "Fist" in this and a later test name is a typo for "First".
  // Cutting at bit 0: the children must have the parents' first bits swapped.
  @Test
  public void shouldCrossingTheFistBitOfTwoSingleVariableSolutionsReturnTheCorrectCrossedSolutions() {
    int numberOfVariables = 1 ;
    int cuttingBit = 0 ;
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.9;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.5) ;
    Mockito.when(randomGenerator.nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1)).thenReturn(cuttingBit) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    assertEquals(solutions.get(0).getVariableValue(0).get(0),
        resultSolutions.get(1).getVariableValue(0).get(0)) ;
    assertEquals(solutions.get(1).getVariableValue(0).get(0),
        resultSolutions.get(0).getVariableValue(0).get(0)) ;
    verify(randomGenerator, times(1)).nextDouble();
    verify(randomGenerator, times(1)).nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1);
  }

  // Cutting at the last bit: only the last bit is expected to be swapped.
  @Test
  public void shouldCrossingTheLastBitOfTwoSingleVariableSolutionsReturnTheCorrectCrossedSolutions() {
    int numberOfVariables = 1 ;
    int cuttingBit = BITS_OF_MOCKED_BINARY_PROBLEM - 1 ;
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.9;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.5) ;
    Mockito.when(randomGenerator.nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1)).thenReturn(cuttingBit) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    assertEquals(solutions.get(0).getVariableValue(0).get(BITS_OF_MOCKED_BINARY_PROBLEM - 1),
        resultSolutions.get(1).getVariableValue(0).get(BITS_OF_MOCKED_BINARY_PROBLEM - 1)) ;
    assertEquals(solutions.get(1).getVariableValue(0).get(BITS_OF_MOCKED_BINARY_PROBLEM - 1),
        resultSolutions.get(0).getVariableValue(0).get(BITS_OF_MOCKED_BINARY_PROBLEM - 1)) ;
    verify(randomGenerator, times(1)).nextDouble();
    verify(randomGenerator, times(1)).nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1);
  }

  @Test
  public void shouldCrossingTheBitInTheMiddleOfTwoSingleVariableSolutionsReturnTheCorrectCrossedSolutions() {
    int numberOfVariables = 1 ;
    int cuttingBit = (BITS_OF_MOCKED_BINARY_PROBLEM - 1) / 2 ;
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.9;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.5) ;
    Mockito.when(randomGenerator.nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1)).thenReturn(cuttingBit) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    // Only the bit at the cutting point is checked; it must be swapped.
    assertEquals(solutions.get(0).getVariableValue(0).get((BITS_OF_MOCKED_BINARY_PROBLEM - 1)/2),
        resultSolutions.get(1).getVariableValue(0).get((BITS_OF_MOCKED_BINARY_PROBLEM - 1)/2)) ;
    assertEquals(solutions.get(1).getVariableValue(0).get((BITS_OF_MOCKED_BINARY_PROBLEM - 1)/2),
        resultSolutions.get(0).getVariableValue(0).get((BITS_OF_MOCKED_BINARY_PROBLEM - 1)/2)) ;
    verify(randomGenerator, times(1)).nextDouble();
    verify(randomGenerator, times(1)).nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM - 1);
  }

  // Cut exactly at the start of the second variable: variable 0 is kept,
  // variables 1 and 2 are exchanged between the parents.
  @Test
  public void shouldCrossingTheFistBitOfSecondVariableReturnTheCorrectCrossedSolutions() {
    int numberOfVariables = 3 ;
    int cuttingBit = BITS_OF_MOCKED_BINARY_PROBLEM ;
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.9;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.5) ;
    Mockito.when(randomGenerator.
        nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM * numberOfVariables - 1)).thenReturn(cuttingBit) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    assertEquals(solutions.get(0).getVariableValue(0), resultSolutions.get(0).getVariableValue(0)) ;
    assertEquals(solutions.get(1).getVariableValue(0), resultSolutions.get(1).getVariableValue(0)) ;
    assertEquals(solutions.get(0).getVariableValue(1), resultSolutions.get(1).getVariableValue(1)) ;
    assertEquals(solutions.get(1).getVariableValue(1), resultSolutions.get(0).getVariableValue(1)) ;
    assertEquals(solutions.get(0).getVariableValue(2), resultSolutions.get(1).getVariableValue(2)) ;
    assertEquals(solutions.get(1).getVariableValue(2), resultSolutions.get(0).getVariableValue(2)) ;
    verify(randomGenerator, times(1)).nextDouble();
    verify(randomGenerator, times(1)).nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM*3 - 1);
  }

  // Cut in the middle of the second variable: variable 0 is kept, the second
  // variable is split at the in-variable cutting bit, variable 2 is exchanged.
  @Test
  public void shouldCrossingTheBitInTheMiddleOfSecondVariableReturnTheCorrectCrossedSolutions() {
    int numberOfVariables = 3 ;
    int cuttingBit = (int) (BITS_OF_MOCKED_BINARY_PROBLEM*1.5);
    JMetalRandom randomGenerator = mock(JMetalRandom.class) ;
    double crossoverProbability = 0.9;
    Mockito.when(randomGenerator.nextDouble()).thenReturn(0.5) ;
    Mockito.when(randomGenerator.
        nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM * numberOfVariables - 1))
        .thenReturn(cuttingBit) ;
    SinglePointCrossover crossover = new SinglePointCrossover(crossoverProbability) ;
    BinaryProblem problem = new MockBinaryProblem(numberOfVariables) ;
    ArrayList<BinarySolution> solutions = new ArrayList<>(3) ;
    solutions.add(problem.createSolution()) ;
    solutions.add(problem.createSolution()) ;
    ReflectionTestUtils.setField(crossover, "randomGenerator", randomGenerator);
    List<BinarySolution> resultSolutions = crossover.execute(solutions) ;
    assertEquals(solutions.get(0).getVariableValue(0), resultSolutions.get(0).getVariableValue(0)) ;
    assertEquals(solutions.get(1).getVariableValue(0), resultSolutions.get(1).getVariableValue(0)) ;
    int cuttingBitInSecondVariable = cuttingBit - BITS_OF_MOCKED_BINARY_PROBLEM ;
    // Bits before the cut stay with the original parent ...
    assertEquals(solutions.get(0).getVariableValue(1).get(0, cuttingBitInSecondVariable),
        resultSolutions.get(0).getVariableValue(1).get(0, cuttingBitInSecondVariable)) ;
    assertEquals(solutions.get(1).getVariableValue(1).get(0, cuttingBitInSecondVariable),
        resultSolutions.get(1).getVariableValue(1).get(0, cuttingBitInSecondVariable)) ;
    // ... bits from the cut onwards are exchanged.
    assertEquals(solutions.get(0).getVariableValue(1).get(cuttingBitInSecondVariable,
        BITS_OF_MOCKED_BINARY_PROBLEM),
        resultSolutions.get(1).getVariableValue(1).get(cuttingBitInSecondVariable,
            BITS_OF_MOCKED_BINARY_PROBLEM)) ;
    assertEquals(solutions.get(1).getVariableValue(1).get(cuttingBitInSecondVariable,
        BITS_OF_MOCKED_BINARY_PROBLEM),
        resultSolutions.get(0).getVariableValue(1).get(cuttingBitInSecondVariable,
            BITS_OF_MOCKED_BINARY_PROBLEM)) ;
    assertEquals(solutions.get(0).getVariableValue(2), resultSolutions.get(1).getVariableValue(2)) ;
    assertEquals(solutions.get(1).getVariableValue(2), resultSolutions.get(0).getVariableValue(2)) ;
    verify(randomGenerator, times(1)).nextDouble();
    verify(randomGenerator, times(1)).nextInt(0, BITS_OF_MOCKED_BINARY_PROBLEM*3 - 1);
  }

  /**
   * Mock class representing a binary problem
   */
  @SuppressWarnings("serial")
  private class MockBinaryProblem extends AbstractBinaryProblem {
    private int[] bitsPerVariable ;

    /** Constructor */
    public MockBinaryProblem(Integer numberOfVariables) {
      setNumberOfVariables(numberOfVariables);
      setNumberOfObjectives(2);
      bitsPerVariable = new int[numberOfVariables] ;
      // All variables share the same bit length.
      for (int var = 0; var < numberOfVariables; var++) {
        bitsPerVariable[var] = BITS_OF_MOCKED_BINARY_PROBLEM;
      }
    }

    @Override
    protected int getBitsPerVariable(int index) {
      return bitsPerVariable[index] ;
    }

    @Override
    public BinarySolution createSolution() {
      return new DefaultBinarySolution(this) ;
    }

    /** Evaluate() method */
    @Override
    public void evaluate(BinarySolution solution) {
      solution.setObjective(0, 0);
      solution.setObjective(1, 1);
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.index.indexer.document.flatfile; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.lang.management.MemoryNotificationInfo; import java.lang.management.MemoryPoolMXBean; import java.lang.management.MemoryUsage; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import javax.management.Notification; import javax.management.NotificationEmitter; import javax.management.NotificationListener; import javax.management.openmbean.CompositeData; import com.google.common.base.Stopwatch; import org.apache.commons.io.FileUtils; import org.apache.jackrabbit.oak.commons.sort.ExternalSort; import org.apache.jackrabbit.oak.index.indexer.document.LastModifiedRange; import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntry; import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntryTraverser; import org.apache.jackrabbit.oak.index.indexer.document.NodeStateEntryTraverserFactory; import org.apache.jackrabbit.oak.plugins.document.mongo.MongoDocumentTraverser; import 
org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static com.google.common.base.Charsets.UTF_8;
import static java.lang.management.ManagementFactory.getMemoryMXBean;
import static java.lang.management.ManagementFactory.getMemoryPoolMXBeans;
import static java.lang.management.MemoryType.HEAP;
import static org.apache.commons.io.FileUtils.ONE_GB;
import static org.apache.jackrabbit.oak.commons.IOUtils.humanReadableByteCount;
import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_MAX_SORT_MEMORY_IN_GB;
import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileNodeStoreBuilder.OAK_INDEXER_MAX_SORT_MEMORY_IN_GB_DEFAULT;
import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.createWriter;
import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.getSortedStoreFileName;
import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.FlatFileStoreUtils.sizeOf;

/**
 * {@link SortStrategy} that traverses all node state entries, buffers them in
 * memory in batches, sorts each batch with a path comparator and spills it to a
 * temp file, then merge-sorts the temp files into the final sorted store file.
 * Batch flushing is driven either by a JMX memory-threshold listener or, when
 * that is unavailable, by a configured maximum byte count.
 */
class TraverseWithSortStrategy implements SortStrategy {
    private static final String OAK_INDEXER_MIN_MEMORY = "oak.indexer.minMemoryForWork";
    private final Logger log = LoggerFactory.getLogger(getClass());
    // Flipped to false by the memory listener when available heap gets low;
    // reset to true after a batch is flushed (see reset()).
    private final AtomicBoolean sufficientMemory = new AtomicBoolean(true);
    private final NodeStateEntryTraverserFactory nodeStatesFactory;
    private final NodeStateEntryWriter entryWriter;
    private final File storeDir;
    private final boolean compressionEnabled;
    private final Charset charset = UTF_8;
    private final Comparator<NodeStateHolder> comparator;
    private NotificationEmitter emitter;
    private MemoryListener listener;
    // Batch-size cap in GB, used only when JMX monitoring is unavailable.
    private final int maxMemory = Integer.getInteger(OAK_INDEXER_MAX_SORT_MEMORY_IN_GB, OAK_INDEXER_MAX_SORT_MEMORY_IN_GB_DEFAULT);
    // Minimum free heap in GB required to keep batching (JMX mode).
    private final long minMemory = Integer.getInteger(OAK_INDEXER_MIN_MEMORY, 2);
    /**
     * Max memory to be used if jmx based memory monitoring is not available. This value is not considered if jmx based
     * monitoring is available.
     */
    private final long maxMemoryBytes = maxMemory * ONE_GB;
    /**
     * When jmx based memory monitoring is available, this value indicates minimum memory which should be free/available for this
     * task to proceed.
     */
    private final long minMemoryBytes = minMemory * ONE_GB;
    private boolean useMaxMemory;
    private long entryCount;
    // Estimated bytes held by the current batch (sum of holder memory sizes).
    private long memoryUsed;
    private File sortWorkDir;
    private List<File> sortedFiles = new ArrayList<>();
    private ArrayList<NodeStateHolder> entryBatch = new ArrayList<>();

    TraverseWithSortStrategy(NodeStateEntryTraverserFactory nodeStatesFactory,
                             PathElementComparator pathComparator,
                             NodeStateEntryWriter entryWriter, File storeDir,
                             boolean compressionEnabled) {
        this.nodeStatesFactory = nodeStatesFactory;
        this.entryWriter = entryWriter;
        this.storeDir = storeDir;
        this.compressionEnabled = compressionEnabled;
        // Compare holders by their pre-split path elements.
        this.comparator = (e1, e2) -> pathComparator.compare(e1.getPathElements(), e2.getPathElements());
    }

    /**
     * Traverses all node states (full last-modified range), writes sorted batch
     * files, then merges them into the final sorted store file.
     */
    @Override
    public File createSortedStoreFile() throws IOException {
        try (NodeStateEntryTraverser nodeStates =
                 nodeStatesFactory.create(new MongoDocumentTraverser.TraversingRange(new LastModifiedRange(0, Long.MAX_VALUE),null))) {
            logFlags();
            configureMemoryListener();
            sortWorkDir = createdSortWorkDir(storeDir);
            writeToSortedFiles(nodeStates);
            return sortStoreFile();
        }
    }

    @Override
    public long getEntryCount() {
        return entryCount;
    }

    /** Merge-sorts all batch files into one sorted store file. */
    private File sortStoreFile() throws IOException {
        log.info("Proceeding to perform merge of {} sorted files", sortedFiles.size());
        Stopwatch w = Stopwatch.createStarted();
        File sortedFile = new File(storeDir, getSortedStoreFileName(compressionEnabled));
        try(BufferedWriter writer = createWriter(sortedFile, compressionEnabled)) {
            Function<String, NodeStateHolder> func1 = (line) -> line == null ? null : new SimpleNodeStateHolder(line);
            Function<NodeStateHolder, String> func2 = holder -> holder == null ? null : holder.getLine();
            ExternalSort.mergeSortedFiles(sortedFiles,
                    writer,
                    comparator,
                    charset,
                    true, //distinct
                    compressionEnabled, //useZip
                    func2,
                    func1
            );
        }
        log.info("Merging of sorted files completed in {}", w);
        return sortedFile;
    }

    /** Streams all entries into memory batches, spilling each batch to disk. */
    private void writeToSortedFiles(NodeStateEntryTraverser nodeStates) throws IOException {
        Stopwatch w = Stopwatch.createStarted();
        for (NodeStateEntry e : nodeStates) {
            entryCount++;
            addEntry(e);
        }
        //Save the last batch
        sortAndSaveBatch();
        //Free up the batch
        entryBatch.clear();
        entryBatch.trimToSize();
        log.info("Dumped {} nodestates in json format in {}",entryCount, w);
        log.info("Created {} sorted files of size {} to merge",
                sortedFiles.size(), humanReadableByteCount(sizeOf(sortedFiles)));
    }

    /**
     * Adds one entry to the current batch; flushes the batch first if memory
     * is low (flush happens BEFORE adding, so the new entry starts the next batch).
     */
    private void addEntry(NodeStateEntry e) throws IOException {
        if (isMemoryLow()) {
            sortAndSaveBatch();
            reset();
        }
        String jsonText = entryWriter.asJson(e.getNodeState());
        //Here logic differs from NodeStateEntrySorter in sense that
        //Holder line consist only of json and not 'path|json'
        NodeStateHolder h = new StateInBytesHolder(e.getPath(), jsonText);
        entryBatch.add(h);
        updateMemoryUsed(h);
    }

    /** Clears batch state after a flush and re-arms the memory flag. */
    private void reset() {
        entryBatch.clear();
        memoryUsed = 0;
        sufficientMemory.set(true);
    }

    /** Sorts the in-memory batch and writes it to a new temp file in sortWorkDir. */
    private void sortAndSaveBatch() throws IOException {
        if (entryBatch.isEmpty()) {
            return;
        }
        entryBatch.sort(comparator);
        Stopwatch w = Stopwatch.createStarted();
        File newtmpfile = File.createTempFile("sortInBatch", "flatfile", sortWorkDir);
        long textSize = 0;
        try (BufferedWriter writer = FlatFileStoreUtils.createWriter(newtmpfile, compressionEnabled)) {
            for (NodeStateHolder h : entryBatch) {
                //Here holder line only contains nodeState json
                String text = entryWriter.toString(h.getPathElements(), h.getLine());
                writer.write(text);
                writer.newLine();
                textSize += text.length() + 1;
            }
        }
        log.info("Sorted and stored batch of size {} (uncompressed {}) with {} entries in {}",
                humanReadableByteCount(newtmpfile.length()), humanReadableByteCount(textSize),entryBatch.size(), w);
sortedFiles.add(newtmpfile); } private boolean isMemoryLow() { if (useMaxMemory){ return memoryUsed > maxMemoryBytes; } return !sufficientMemory.get(); } private void updateMemoryUsed(NodeStateHolder h) { memoryUsed += h.getMemorySize(); } private static File createdSortWorkDir(File storeDir) throws IOException { File sortedFileDir = new File(storeDir, "sort-work-dir"); FileUtils.forceMkdir(sortedFileDir); return sortedFileDir; } private void logFlags() { log.info("Min heap memory (GB) to be required : {} ({})", minMemory, OAK_INDEXER_MIN_MEMORY); log.info("Max heap memory (GB) to be used for merge sort : {} ({})", maxMemory, OAK_INDEXER_MAX_SORT_MEMORY_IN_GB); } //~-------------------------------------< memory management > private void configureMemoryListener() { MemoryPoolMXBean pool = getMemoryPool(); if (pool == null) { log.warn("Unable to setup monitoring of available memory. " + "Would use configured maxMemory limit of {} GB", maxMemory); useMaxMemory = true; return; } emitter = (NotificationEmitter) getMemoryMXBean(); listener = new MemoryListener(); emitter.addNotificationListener(listener, null, null); MemoryUsage usage = pool.getCollectionUsage(); long maxMemory = usage.getMax(); long warningThreshold = minMemory * ONE_GB; if (warningThreshold > maxMemory) { log.warn("Configured minimum memory {} GB more than available memory ({})." + "Overriding configuration accordingly.", minMemory, humanReadableByteCount(maxMemory)); warningThreshold = maxMemory; } log.info("Setting up a listener to monitor pool '{}' and trigger batch save " + "if memory drop below {} GB (max {})", pool.getName(), minMemory, humanReadableByteCount(maxMemory)); pool.setCollectionUsageThreshold(warningThreshold); checkMemory(usage); } private void checkMemory(MemoryUsage usage) { long maxMemory = usage.getMax(); long usedMemory = usage.getUsed(); long avail = maxMemory - usedMemory; if (avail > minMemoryBytes) { sufficientMemory.set(true); log.info("Available memory level {} is good. 
Current batch size {}", humanReadableByteCount(avail), entryBatch.size()); } else { sufficientMemory.set(false); log.info("Available memory level {} (required {}) is low. Enabling flag to trigger batch save", humanReadableByteCount(avail), minMemory); } } //Taken from GCMemoryBarrier private class MemoryListener implements NotificationListener { @Override public void handleNotification(Notification notification, Object handback) { if (notification .getType() .equals(MemoryNotificationInfo.MEMORY_COLLECTION_THRESHOLD_EXCEEDED)) { if (sufficientMemory.get()) { CompositeData cd = (CompositeData) notification .getUserData(); MemoryNotificationInfo info = MemoryNotificationInfo .from(cd); checkMemory(info.getUsage()); } } } } private static MemoryPoolMXBean getMemoryPool() { long maxSize = 0; MemoryPoolMXBean maxPool = null; for (MemoryPoolMXBean pool : getMemoryPoolMXBeans()) { if (HEAP == pool.getType() && pool.isCollectionUsageThresholdSupported()) { // Get usage after a GC, which is more stable, if available long poolSize = pool.getCollectionUsage().getMax(); // Keep the pool with biggest size, by default it should be Old Gen Space if (poolSize > maxSize) { maxPool = pool; } } } return maxPool; } }
/* * MIT License * * Copyright (c) 2014 Klemm Software Consulting, Mirko Klemm * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package com.kscs.util.plugins.xjc.base; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.net.URL; import java.nio.charset.Charset; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.NoSuchElementException; import java.util.Properties; import java.util.ResourceBundle; import java.util.Set; import java.util.logging.Logger; /** * @author Mirko Klemm 2015-02-25 */ public class PropertyDirectoryResourceBundle extends ResourceBundle { private static final Logger LOGGER = Logger.getLogger(PropertyDirectoryResourceBundle.class.getName()); private final Map<String, String> values; public static ResourceBundle getInstance(final String baseName) { return getBundle(baseName, Control.INSTANCE); } public static ResourceBundle getInstance(final String baseName, final Locale locale) { return getBundle(baseName, locale, Control.INSTANCE); } public static ResourceBundle getInstance(final Class<?> localizedClass) { return getBundle(localizedClass.getName(), Control.INSTANCE); } public static ResourceBundle getInstance(final Class<?> localizedClass, final Locale locale) { return getBundle(localizedClass.getName(), locale, Control.INSTANCE); } public PropertyDirectoryResourceBundle(final Map<String, String> values) { this.values = values; } @Override protected Object handleGetObject(final String key) { return this.values.get(key); } @Override public Enumeration<String> getKeys() { final ResourceBundle parent = this.parent; return new ResourceBundleEnumeration(this.values.keySet(), parent != null ? 
parent.getKeys() : null); } @Override protected Set<String> handleKeySet() { return this.values.keySet(); } public static class Control extends ResourceBundle.Control { public static final Control INSTANCE = new Control(); final Map<String, PropertyDirectoryResourceBundle> cachedBundles = new HashMap<>(); private Control() { // singleton } @Override public List<String> getFormats(final String baseName) { final List<String> formats = new ArrayList<>(); formats.add("text"); formats.addAll(super.getFormats(baseName)); return Collections.unmodifiableList(formats); } @Override public ResourceBundle newBundle(final String baseName, final Locale locale, final String format, final ClassLoader loader, final boolean reload) throws IllegalAccessException, InstantiationException, IOException { if ("text".equals(format)) { final String bundleLocalizedName = toBundleName(baseName, locale); if (!reload && this.cachedBundles.containsKey(bundleLocalizedName)) { return this.cachedBundles.get(bundleLocalizedName); } final String bundleIndexPath = baseName.replace('.', '/') + ".index"; final Properties bundleIndex = new Properties(); final InputStream inputStream = loader.getResourceAsStream(bundleIndexPath); if (inputStream != null) { PropertyDirectoryResourceBundle.LOGGER.finer("Loading text file resource index: " + bundleIndexPath); bundleIndex.load(inputStream); } final String bundleResourcePath = bundleLocalizedName.replace('.', '/'); final Path dirPath = ResourceDirectory.fromResource(loader, bundleResourcePath); PropertyDirectoryResourceBundle.LOGGER.finer("Resolved directory path: " + dirPath); if(dirPath == null) { return super.newBundle(baseName, locale, format, loader, reload); } try(final DirectoryStream<Path> directoryStream = Files.newDirectoryStream(dirPath)) { for (final Path filePath : directoryStream) { if (!bundleIndex.containsValue(filePath.getFileName().toString())) { final String fileName = filePath.getFileName().toString(); 
bundleIndex.setProperty(fileName.substring(0, fileName.lastIndexOf('.')), fileName); } } } final Map<String, String> values = new HashMap<>(); for (final Object key : bundleIndex.keySet()) { final String propertyValue = bundleIndex.getProperty(key.toString()); final String textFileName = propertyValue == null || propertyValue.trim().length() == 0 ? key.toString() : propertyValue; final Path textFilePath = Paths.get(bundleResourcePath, textFileName); try { PropertyDirectoryResourceBundle.LOGGER.finest("Loading resource text file \"" + textFilePath + "\""); final URL textFileURL = loader.getResource(textFilePath.toString()); PropertyDirectoryResourceBundle.LOGGER.finest("Resource text file \"" + textFilePath + "\" URL: " + textFileURL); final StringBuilder sb = new StringBuilder(); try(final BufferedReader reader = new BufferedReader(new InputStreamReader(loader.getResourceAsStream(textFilePath.toString()), Charset.forName("UTF-8")))) { String line; while ((line = reader.readLine()) != null) { sb.append(line); sb.append("\n"); } } PropertyDirectoryResourceBundle.LOGGER.finest("Text file \"" + textFilePath + "\" loaded."); values.put(key.toString(), sb.toString()); } catch (final Exception e) { return super.newBundle(baseName, locale, format, loader, reload); } } final PropertyDirectoryResourceBundle bundle = new PropertyDirectoryResourceBundle(values); bundle.setParent(super.newBundle(baseName, locale, "java.properties", loader, reload)); this.cachedBundles.put(bundleLocalizedName, bundle); return bundle; } else { return super.newBundle(baseName, locale, format, loader, reload); } } } } class ResourceBundleEnumeration implements Enumeration<String> { private Set<String> set; private Iterator<String> iterator; private Enumeration<String> enumeration; // may remain null /** * Constructs a resource bundle enumeration. * @param set an set providing some elements of the enumeration * @param enumeration an enumeration providing more elements of the enumeration. 
* enumeration may be null. */ public ResourceBundleEnumeration(final Set<String> set, final Enumeration<String> enumeration) { this.set = set; this.iterator = set.iterator(); this.enumeration = enumeration; } String next = null; public boolean hasMoreElements() { if (this.next == null) { if (this.iterator.hasNext()) { this.next = this.iterator.next(); } else if (this.enumeration != null) { while (this.next == null && this.enumeration.hasMoreElements()) { this.next = this.enumeration.nextElement(); if (this.set.contains(this.next)) { this.next = null; } } } } return this.next != null; } public String nextElement() { if (hasMoreElements()) { final String result = this.next; this.next = null; return result; } else { throw new NoSuchElementException(); } } }
/**
 *
 */
package edu.berkeley.nlp.PCFGLA;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;

import edu.berkeley.nlp.syntax.Tree;
import edu.berkeley.nlp.util.Numberer;
import edu.berkeley.nlp.util.Pair;

/**
 * Unary rule whose refinement parameters are kept in an adaptive hierarchy
 * tree: each leaf of {@code hierarchy} is one (log-space, judging from the
 * Math.exp in the removed dead code) parameter. Leaves are split into
 * {@code splitFactor} children on demand and merged back when all children of
 * a subtree are zero.
 *
 * @author petrov
 */
public class HierarchicalAdaptiveUnaryRule extends HierarchicalUnaryRule {

	private static final long serialVersionUID = 1L;
	// (child substate, parent substate) -> parameter id; nulled out by mergeRule()
	public short[][] mapping;
	// adaptive refinement tree; one parameter per leaf
	Tree<Double> hierarchy;
	// number of leaves in 'hierarchy' (kept in sync by split/merge/count)
	public int nParam;
	public SubRule[] subRuleList;

	// assume for now that the rule being passed in is unsplit
	public HierarchicalAdaptiveUnaryRule(UnaryRule b) {
		super(b);
		hierarchy = new Tree<Double>(0.0);
		scores = new double[1][1];
		mapping = new short[1][1]; // to parameters
		nParam = 1;
	}

	public Pair<Integer, Integer> countParameters() {
		// first one is the max_depth, second one is the number of parameters
		int maxDepth = hierarchy.getDepth();
		nParam = hierarchy.getYield().size();
		return new Pair<Integer, Integer>(maxDepth, nParam);
	}

	/**
	 * Splits every eligible leaf of the hierarchy into 4 children with small
	 * random initial values. Note: the numSubStates/randomness/doNotNormalize/
	 * mode arguments are unused here — the adaptive hierarchy replaces the
	 * fixed split scheme of the superclass.
	 */
	@Override
	public HierarchicalAdaptiveUnaryRule splitRule(short[] numSubStates,
			short[] newNumSubStates, Random random, double randomness,
			boolean doNotNormalize, int mode) {
		int splitFactor = 4;
		splitRuleHelper(hierarchy, random, splitFactor);
		return this;
	}

	/**
	 * Collapses all-zero sibling groups in the hierarchy and releases the
	 * explicit representations (scores/mapping/subRuleList/scoreHierarchy —
	 * the latter presumably a superclass field; not declared here).
	 *
	 * @return number of parameters removed by the merge
	 */
	@Override
	public int mergeRule() {
		int paramBefore = nParam;
		compactifyHierarchy(hierarchy);
		scores = null;
		mapping = null;
		subRuleList = null;
		scoreHierarchy = null;
		return paramBefore - nParam;
	}

	// Splits a leaf unless its value is 0 (except when the rule has only its
	// single initial parameter, which is always split).
	private void splitRuleHelper(Tree<Double> tree, Random random,
			int splitFactor) {
		if (tree.isLeaf()) {
			if (tree.getLabel() != 0 || nParam == 1) { // split it
				ArrayList<Tree<Double>> children = new ArrayList<Tree<Double>>(
						splitFactor);
				for (int i = 0; i < splitFactor; i++) {
					Tree<Double> child = new Tree<Double>(
							random.nextDouble() / 100.0);
					children.add(child);
				}
				tree.setChildren(children);
				nParam += splitFactor - 1;
			}
		} else {
			for (Tree<Double> child : tree.getChildren()) {
				splitRuleHelper(child, random, splitFactor);
			}
		}
	}

	// The dense-score expansion this method once performed lives on only as
	// the subrule list; finalLevel/newNumSubStates are currently unused.
	@Override
	public void explicitlyComputeScores(int finalLevel, short[] newNumSubStates) {
		computeSubRuleList();
	}

	/**
	 * Copies new parameter values from the flat array into the hierarchy
	 * leaves, starting at this rule's offset ('identifier' — a superclass
	 * field, presumably the rule's base index; confirm in superclass).
	 */
	public void updateScores(double[] scores) {
		int nSubstates = updateHierarchy(hierarchy, 0, scores);
		if (nSubstates != nParam)
			System.out.println("Didn't update all parameters");
	}

	private int updateHierarchy(Tree<Double> tree, int nextSubstate,
			double[] scores) {
		if (tree.isLeaf()) {
			double val = scores[identifier + nextSubstate++];
			// Guard against runaway proposed values (> 200): the leaf keeps its
			// old label. NOTE(review): 'val = 0' here is a dead store, and the
			// message typo "danegrous" (dangerous) is a runtime string left as-is.
			if (val > 200) {
				val = 0;
				System.out
						.println("Ignored proposed unary value since it was danegrous");
			} else
				tree.setLabel(val);
		} else {
			for (Tree<Double> child : tree.getChildren()) {
				nextSubstate = updateHierarchy(child, nextSubstate, scores);
			}
		}
		return nextSubstate;
	}

	/**
	 * @return the leaf labels of the hierarchy, i.e. all current parameters
	 */
	public List<Double> getFinalLevel() {
		return hierarchy.getYield();
	}

	// Removes a depth-2 subtree whose children are all zero, keeping a single
	// parameter in its place; recurses into deeper subtrees otherwise.
	private void compactifyHierarchy(Tree<Double> tree) {
		if (tree.getDepth() == 2) {
			boolean allZero = true;
			for (Tree<Double> child : tree.getChildren()) {
				allZero = allZero && child.getLabel() == 0;
			}
			if (allZero) {
				nParam -= tree.getChildren().size() - 1;
				tree.setChildren(Collections.EMPTY_LIST);
			}
		} else {
			for (Tree<Double> child : tree.getChildren()) {
				compactifyHierarchy(child);
			}
		}
	}

	/** Short form "parent -> child" using the global tag numberer. */
	public String toStringShort() {
		Numberer n = Numberer.getGlobalNumberer("tags");
		String cState = (String) n.object(childState);
		String pState = (String) n.object(parentState);
		return (pState + " -> " + cState);
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		Numberer n = Numberer.getGlobalNumberer("tags");
		String cState = (String) n.object(childState);
		String pState = (String)
n.object(parentState); sb.append(pState + " -> " + cState + "\n"); if (subRuleList == null) { compactifyHierarchy(hierarchy); lastLevel = hierarchy.getDepth(); computeSubRuleList(); } for (SubRule rule : subRuleList) { if (rule == null) continue; sb.append(rule.toString(lastLevel - 1)); sb.append("\n"); } // sb.append(PennTreeRenderer.render(hierarchy)); sb.append("\n"); // sb.append(Arrays.toString(scores)); return sb.toString(); } @Override public int countNonZeroFeatures() { int total = 0; for (Tree<Double> d : hierarchy.getPreOrderTraversal()) { if (d.getLabel() != 0) total++; } return total; } public int countNonZeroFringeFeatures() { int total = 0; for (Tree<Double> d : hierarchy.getTerminals()) { if (d.getLabel() != 0) total++; } return total; } public void computeSubRuleList() { subRuleList = new SubRule[nParam]; int nRules = computeSubRules(0, 0, 0, 0, 0, hierarchy); if (parentState != 0 && nRules != nParam) System.out.println("A rule got lost"); } private int computeSubRules(int myID, double previousScore, int nextChildSubstate, int nextParentSubstate, int myDepth, Tree<Double> tree) { if (tree.isLeaf()) { if (parentState == 0 && nextParentSubstate > 0) { myID++; return myID; } double myScore = Math.exp(previousScore + tree.getLabel()); SubRule rule = new SubRule((short) nextChildSubstate, (short) nextParentSubstate, (short) myDepth, myScore); subRuleList[myID] = rule; myID++; } else { double myScore = previousScore + tree.getLabel(); int i = 0; for (Tree<Double> child : tree.getChildren()) { myID = computeSubRules(myID, myScore, nextChildSubstate * 2 + (i / 2), nextParentSubstate * 2 + (i % 2), myDepth + 1, child); i++; } } return myID; } class SubRule implements Serializable { private static final long serialVersionUID = 1; short child, parent, level; double score; SubRule(short c, short p, short l, double s) { child = c; parent = p; level = l; score = s; } @Override public String toString() { String s = "[" + parent + "] \t -> \t [" + child + "] \t " + 
score; return s; } public String toString(int finalLevel) { if (finalLevel == level) return toString(); int k = (int) Math.pow(2, finalLevel - level); String s = "[" + (k * parent) + "-" + (k * parent + k - 1) + "] \t -> \t [" + (k * child) + "-" + (k * child + k - 1) + "] \t " + score + "\t level: " + level; ; return s; } } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bootstrap;

import com.sun.jna.Native;
import com.sun.jna.Pointer;
import com.sun.jna.WString;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.Constants;
import org.elasticsearch.monitor.jvm.JvmInfo;

import java.nio.file.Path;

import static org.elasticsearch.bootstrap.JNAKernel32Library.SizeT;

/**
 * This class performs the actual work with JNA and library bindings to call native methods. It should only be used after
 * we are sure that the JNA classes are available to the JVM
 */
class JNANatives {

    /** no instantiation */
    private JNANatives() {}

    private static final Logger logger = LogManager.getLogger(JNANatives.class);

    // Set to true, in case native mlockall call was successful
    static boolean LOCAL_MLOCKALL = false;
    // Set to true, in case native system call filter install was successful
    static boolean LOCAL_SYSTEM_CALL_FILTER = false;
    // Set to true, in case policy can be applied to all threads of the process (even existing ones)
    // otherwise they are only inherited for new threads (ES app threads)
    static boolean LOCAL_SYSTEM_CALL_FILTER_ALL = false;
    // set to the maximum number of threads that can be created for
    // the user ID that owns the running Elasticsearch process
    static long MAX_NUMBER_OF_THREADS = -1;

    // RLIMIT_AS soft limit for the process, or Long.MIN_VALUE if never probed / unavailable
    static long MAX_SIZE_VIRTUAL_MEMORY = Long.MIN_VALUE;

    // RLIMIT_FSIZE soft limit for the process, or Long.MIN_VALUE if never probed / unavailable
    static long MAX_FILE_SIZE = Long.MIN_VALUE;

    /**
     * Attempts to lock all of the JVM's memory into RAM via native {@code mlockall}.
     * On success sets {@link #LOCAL_MLOCKALL}; on failure logs actionable advice,
     * including the current RLIMIT_MEMLOCK soft/hard limits when they can be read.
     * NOTE: errno is captured via {@code Native.getLastError()} immediately after the
     * failed call, before any other JNA call can clobber it — keep the statement order.
     */
    static void tryMlockall() {
        int errno = Integer.MIN_VALUE;
        String errMsg = null;
        boolean rlimitSuccess = false;
        long softLimit = 0;
        long hardLimit = 0;
        try {
            int result = JNACLibrary.mlockall(JNACLibrary.MCL_CURRENT);
            if (result == 0) {
                LOCAL_MLOCKALL = true;
                return;
            }

            errno = Native.getLastError();
            errMsg = JNACLibrary.strerror(errno);
            if (Constants.LINUX || Constants.MAC_OS_X) {
                // we only know RLIMIT_MEMLOCK for these two at the moment.
                JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
                if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_MEMLOCK, rlimit) == 0) {
                    rlimitSuccess = true;
                    softLimit = rlimit.rlim_cur.longValue();
                    hardLimit = rlimit.rlim_max.longValue();
                } else {
                    logger.warn("Unable to retrieve resource limits: {}", JNACLibrary.strerror(Native.getLastError()));
                }
            }
        } catch (UnsatisfiedLinkError e) {
            // this will have already been logged by CLibrary, no need to repeat it
            return;
        }

        // mlockall failed for some reason
        logger.warn("Unable to lock JVM Memory: error={}, reason={}", errno , errMsg);
        logger.warn("This can result in part of the JVM being swapped out.");
        if (errno == JNACLibrary.ENOMEM) {
            if (rlimitSuccess) {
                logger.warn("Increase RLIMIT_MEMLOCK, soft limit: {}, hard limit: {}", rlimitToString(softLimit), rlimitToString(hardLimit));
                if (Constants.LINUX) {
                    // give specific instructions for the linux case to make it easy
                    String user = System.getProperty("user.name");
                    logger.warn("These can be adjusted by modifying /etc/security/limits.conf, for example: \n" +
                                "\t# allow user '{}' mlockall\n" +
                                "\t{} soft memlock unlimited\n" +
                                "\t{} hard memlock unlimited",
                                user, user, user
                                );
                    logger.warn("If you are logged in interactively, you will have to re-login for the new limits to take effect.");
                }
            } else {
                logger.warn("Increase RLIMIT_MEMLOCK (ulimit).");
            }
        }
    }

    /**
     * On Linux, reads the RLIMIT_NPROC soft limit into {@link #MAX_NUMBER_OF_THREADS}.
     * No-op on other platforms (the constant 6 is Linux's RLIMIT_NPROC; the value
     * differs on BSD-derived OSes, where the resource also means processes, not threads).
     */
    static void trySetMaxNumberOfThreads() {
        if (Constants.LINUX) {
            // this is only valid on Linux and the value *is* different on OS X
            // see /usr/include/sys/resource.h on OS X
            // on Linux the resource RLIMIT_NPROC means *the number of threads*
            // this is in opposition to BSD-derived OSes
            final int rlimit_nproc = 6;

            final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
            if (JNACLibrary.getrlimit(rlimit_nproc, rlimit) == 0) {
                MAX_NUMBER_OF_THREADS = rlimit.rlim_cur.longValue();
            } else {
                logger.warn("unable to retrieve max number of threads [" + JNACLibrary.strerror(Native.getLastError()) + "]");
            }
        }
    }

    /**
     * On Linux/macOS, reads the RLIMIT_AS (address space) soft limit into
     * {@link #MAX_SIZE_VIRTUAL_MEMORY}. No-op elsewhere.
     */
    static void trySetMaxSizeVirtualMemory() {
        if (Constants.LINUX || Constants.MAC_OS_X) {
            final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
            if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_AS, rlimit) == 0) {
                MAX_SIZE_VIRTUAL_MEMORY = rlimit.rlim_cur.longValue();
            } else {
                logger.warn("unable to retrieve max size virtual memory [" + JNACLibrary.strerror(Native.getLastError()) + "]");
            }
        }
    }

    /**
     * On Linux/macOS, reads the RLIMIT_FSIZE (max file size) soft limit into
     * {@link #MAX_FILE_SIZE}. No-op elsewhere.
     */
    static void trySetMaxFileSize() {
        if (Constants.LINUX || Constants.MAC_OS_X) {
            final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
            if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_FSIZE, rlimit) == 0) {
                MAX_FILE_SIZE = rlimit.rlim_cur.longValue();
            } else {
                logger.warn("unable to retrieve max file size [" + JNACLibrary.strerror(Native.getLastError()) + "]");
            }
        }
    }

    /**
     * Formats an rlimit value for logging: "unlimited" for RLIM_INFINITY,
     * otherwise the value printed as an unsigned decimal (rlim_t is unsigned).
     */
    static String rlimitToString(long value) {
        assert Constants.LINUX || Constants.MAC_OS_X;
        if (value == JNACLibrary.RLIM_INFINITY) {
            return "unlimited";
        } else {
            return Long.toUnsignedString(value);
        }
    }

    /** Returns true if user is root, false if not, or if we don't know */
    static boolean definitelyRunningAsRoot() {
        if (Constants.WINDOWS) {
            return false; // don't know
        }
        try {
            return JNACLibrary.geteuid() == 0;
        } catch (UnsatisfiedLinkError e) {
            // this will have already been logged by Kernel32Library, no need to repeat it
            return false;
        }
    }

    /**
     * Windows analogue of {@link #tryMlockall()}: grows the process working set,
     * then walks the address space with VirtualQueryEx and VirtualLocks every
     * committed, accessible, non-guard region. Sets {@link #LOCAL_MLOCKALL} once
     * the walk completes. The process handle is always closed in the finally block.
     */
    static void tryVirtualLock() {
        JNAKernel32Library kernel = JNAKernel32Library.getInstance();
        Pointer process = null;
        try {
            process = kernel.GetCurrentProcess();
            // By default, Windows limits the number of pages that can be locked.
            // Thus, we need to first increase the working set size of the JVM by
            // the amount of memory we wish to lock, plus a small overhead (1MB).
            SizeT size = new SizeT(JvmInfo.jvmInfo().getMem().getHeapInit().getBytes() + (1024 * 1024));
            if (!kernel.SetProcessWorkingSetSize(process, size, size)) {
                logger.warn("Unable to lock JVM memory. Failed to set working set size. Error code {}", Native.getLastError());
            } else {
                JNAKernel32Library.MemoryBasicInformation memInfo = new JNAKernel32Library.MemoryBasicInformation();
                long address = 0;
                while (kernel.VirtualQueryEx(process, new Pointer(address), memInfo, memInfo.size()) != 0) {
                    boolean lockable = memInfo.State.longValue() == JNAKernel32Library.MEM_COMMIT
                            && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_NOACCESS) != JNAKernel32Library.PAGE_NOACCESS
                            && (memInfo.Protect.longValue() & JNAKernel32Library.PAGE_GUARD) != JNAKernel32Library.PAGE_GUARD;
                    if (lockable) {
                        kernel.VirtualLock(memInfo.BaseAddress, new SizeT(memInfo.RegionSize.longValue()));
                    }
                    // Move to the next region
                    address += memInfo.RegionSize.longValue();
                }
                LOCAL_MLOCKALL = true;
            }
        } catch (UnsatisfiedLinkError e) {
            // this will have already been logged by Kernel32Library, no need to repeat it
        } finally {
            if (process != null) {
                kernel.CloseHandle(process);
            }
        }
    }

    /**
     * Retrieves the short path form of the specified path.
     *
     * @param path the path
     * @return the short path name (or the original path if getting the short path name fails for any reason)
     */
    static String getShortPathName(String path) {
        assert Constants.WINDOWS;
        try {
            // "\\?\" extended-length prefix lifts the MAX_PATH limit for the Unicode API
            final WString longPath = new WString("\\\\?\\" + path);
            // first we get the length of the buffer needed
            final int length = JNAKernel32Library.getInstance().GetShortPathNameW(longPath, null, 0);
            if (length == 0) {
                logger.warn("failed to get short path name: {}", Native.getLastError());
                return path;
            }
            final char[] shortPath = new char[length];
            // knowing the length of the buffer, now we get the short name
            if (JNAKernel32Library.getInstance().GetShortPathNameW(longPath, shortPath, length) > 0) {
                return Native.toString(shortPath);
            } else {
                logger.warn("failed to get short path name: {}", Native.getLastError());
                return path;
            }
        } catch (final UnsatisfiedLinkError e) {
            return path;
        }
    }

    /**
     * Registers a console Ctrl handler so the process can react to Ctrl events
     * (e.g. Ctrl-C / console close). No-op on non-Windows platforms.
     */
    static void addConsoleCtrlHandler(ConsoleCtrlHandler handler) {
        // The console Ctrl handler is necessary on Windows platforms only.
        if (Constants.WINDOWS) {
            try {
                boolean result = JNAKernel32Library.getInstance().addConsoleCtrlHandler(handler);
                if (result) {
                    logger.debug("console ctrl handler correctly set");
                } else {
                    logger.warn("unknown error {} when adding console ctrl handler", Native.getLastError());
                }
            } catch (UnsatisfiedLinkError e) {
                // this will have already been logged by Kernel32Library, no need to repeat it
            }
        }
    }

    /**
     * Attempts to install the system call filter (e.g. seccomp on Linux).
     * On success sets {@link #LOCAL_SYSTEM_CALL_FILTER}; if the filter could be
     * applied to all existing threads (init returned 1), also sets
     * {@link #LOCAL_SYSTEM_CALL_FILTER_ALL}. Failure is non-fatal (best effort).
     *
     * @param tmpFile the temporary directory handed to the filter initialization
     */
    static void tryInstallSystemCallFilter(Path tmpFile) {
        try {
            int ret = SystemCallFilter.init(tmpFile);
            LOCAL_SYSTEM_CALL_FILTER = true;
            if (ret == 1) {
                LOCAL_SYSTEM_CALL_FILTER_ALL = true;
            }
        } catch (Exception e) {
            // this is likely to happen unless the kernel is newish, its a best effort at the moment
            // so we log stacktrace at debug for now...
            if (logger.isDebugEnabled()) {
                logger.debug("unable to install syscall filter", e);
            }
            logger.warn("unable to install syscall filter: ", e);
        }
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.7 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2017.07.24 at 05:36:05 PM EEST // package lt.registrucentras.esaskaita.service.invoice.ubl.cac; import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; import com.google.common.base.Objects; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.CompanyLegalFormCodeType; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.CompanyLegalFormType; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.DescriptionType; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.EmployeeQuantityType; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.OperatingYearsQuantityType; import lt.registrucentras.esaskaita.service.invoice.ubl.cbc.PersonalSituationType; /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ABIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Details&lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;The evaluation that the Contracting Authority party requests to fulfill to the * tenderers. 
* &lt;/ccts:Definition&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;/ccts:Component&gt; * </pre> * * * <p>Java class for TendererQualificationRequestType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="TendererQualificationRequestType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}CompanyLegalFormCode" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}CompanyLegalForm" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}PersonalSituation" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}OperatingYearsQuantity" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}EmployeeQuantity" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2}Description" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}RequiredBusinessClassificationScheme" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}TechnicalEvaluationCriterion" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}FinancialEvaluationCriterion" maxOccurs="unbounded" minOccurs="0"/> * &lt;element ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}SpecificTendererRequirement" maxOccurs="unbounded" minOccurs="0"/> * &lt;element 
ref="{urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2}EconomicOperatorRole" maxOccurs="unbounded" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "TendererQualificationRequestType", propOrder = { "companyLegalFormCode", "companyLegalForm", "personalSituation", "operatingYearsQuantity", "employeeQuantity", "description", "requiredBusinessClassificationScheme", "technicalEvaluationCriterion", "financialEvaluationCriterion", "specificTendererRequirement", "economicOperatorRole" }) public class TendererQualificationRequestType implements Serializable { private final static long serialVersionUID = 1L; @XmlElement(name = "CompanyLegalFormCode", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected CompanyLegalFormCodeType companyLegalFormCode; @XmlElement(name = "CompanyLegalForm", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected CompanyLegalFormType companyLegalForm; @XmlElement(name = "PersonalSituation", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected List<PersonalSituationType> personalSituation; @XmlElement(name = "OperatingYearsQuantity", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected OperatingYearsQuantityType operatingYearsQuantity; @XmlElement(name = "EmployeeQuantity", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected EmployeeQuantityType employeeQuantity; @XmlElement(name = "Description", namespace = "urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2") protected List<DescriptionType> description; @XmlElement(name = "RequiredBusinessClassificationScheme") protected List<ClassificationSchemeType> requiredBusinessClassificationScheme; @XmlElement(name = "TechnicalEvaluationCriterion") 
protected List<EvaluationCriterionType> technicalEvaluationCriterion; @XmlElement(name = "FinancialEvaluationCriterion") protected List<EvaluationCriterionType> financialEvaluationCriterion; @XmlElement(name = "SpecificTendererRequirement") protected List<TendererRequirementType> specificTendererRequirement; @XmlElement(name = "EconomicOperatorRole") protected List<EconomicOperatorRoleType> economicOperatorRole; /** * Default no-arg constructor * */ public TendererQualificationRequestType() { super(); } /** * Fully-initialising value constructor * */ public TendererQualificationRequestType(final CompanyLegalFormCodeType companyLegalFormCode, final CompanyLegalFormType companyLegalForm, final List<PersonalSituationType> personalSituation, final OperatingYearsQuantityType operatingYearsQuantity, final EmployeeQuantityType employeeQuantity, final List<DescriptionType> description, final List<ClassificationSchemeType> requiredBusinessClassificationScheme, final List<EvaluationCriterionType> technicalEvaluationCriterion, final List<EvaluationCriterionType> financialEvaluationCriterion, final List<TendererRequirementType> specificTendererRequirement, final List<EconomicOperatorRoleType> economicOperatorRole) { this.companyLegalFormCode = companyLegalFormCode; this.companyLegalForm = companyLegalForm; this.personalSituation = personalSituation; this.operatingYearsQuantity = operatingYearsQuantity; this.employeeQuantity = employeeQuantity; this.description = description; this.requiredBusinessClassificationScheme = requiredBusinessClassificationScheme; this.technicalEvaluationCriterion = technicalEvaluationCriterion; this.financialEvaluationCriterion = financialEvaluationCriterion; this.specificTendererRequirement = specificTendererRequirement; this.economicOperatorRole = economicOperatorRole; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" 
xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Company Legal Form Code. Code * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;The legal status requested for potential tenderers, expressed as a code. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..1&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Company Legal Form Code&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Code&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Code. Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * * * @return * possible object is * {@link CompanyLegalFormCodeType } * */ public CompanyLegalFormCodeType getCompanyLegalFormCode() { return companyLegalFormCode; } /** * Sets the value of the companyLegalFormCode property. * * @param value * allowed object is * {@link CompanyLegalFormCodeType } * */ public void setCompanyLegalFormCode(CompanyLegalFormCodeType value) { this.companyLegalFormCode = value; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Company Legal Form. 
Text * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;The legal status requested for potential tenderers, expressed as text * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..1&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Company Legal Form&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Text&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Text. Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * * * @return * possible object is * {@link CompanyLegalFormType } * */ public CompanyLegalFormType getCompanyLegalForm() { return companyLegalForm; } /** * Sets the value of the companyLegalForm property. * * @param value * allowed object is * {@link CompanyLegalFormType } * */ public void setCompanyLegalForm(CompanyLegalFormType value) { this.companyLegalForm = value; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Personal Situation. Text * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;Text describing the personal situation of the economic operators in this * tendering process. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Personal Situation&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Text&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Text. 
Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the personalSituation property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the personalSituation property. * * <p> * For example, to add a new item, do as follows: * <pre> * getPersonalSituation().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link PersonalSituationType } * * */ public List<PersonalSituationType> getPersonalSituation() { if (personalSituation == null) { personalSituation = new ArrayList<PersonalSituationType>(); } return this.personalSituation; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Operating Years. Quantity * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;Textual description of the legal form required for potential tenderers. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..1&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Operating Years&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Quantity&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Quantity. 
Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * * * @return * possible object is * {@link OperatingYearsQuantityType } * */ public OperatingYearsQuantityType getOperatingYearsQuantity() { return operatingYearsQuantity; } /** * Sets the value of the operatingYearsQuantity property. * * @param value * allowed object is * {@link OperatingYearsQuantityType } * */ public void setOperatingYearsQuantity(OperatingYearsQuantityType value) { this.operatingYearsQuantity = value; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Employee. Quantity * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;Textual description of the legal form required for potential tenderers. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..1&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Employee&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Quantity&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Quantity. Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * * * @return * possible object is * {@link EmployeeQuantityType } * */ public EmployeeQuantityType getEmployeeQuantity() { return employeeQuantity; } /** * Sets the value of the employeeQuantity property. 
* * @param value * allowed object is * {@link EmployeeQuantityType } * */ public void setEmployeeQuantity(EmployeeQuantityType value) { this.employeeQuantity = value; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;BBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Description. Text * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;Text describing the evaluation requirements for this tenderer. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Description&lt;/ccts:PropertyTerm&gt;&lt;ccts:RepresentationTerm&gt;Text&lt;/ccts:RepresentationTerm&gt;&lt;ccts:DataType&gt;Text. Type&lt;/ccts:DataType&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the description property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the description property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getDescription().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link DescriptionType } * * */ public List<DescriptionType> getDescription() { if (description == null) { description = new ArrayList<DescriptionType>(); } return this.description; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Required Business_ Classification * Scheme. Classification Scheme * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A classification scheme for the business profile.&lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTermQualifier&gt;Required Business&lt;/ccts:PropertyTermQualifier&gt;&lt;ccts:PropertyTerm&gt;Classification Scheme&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Classification Scheme&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Classification Scheme&lt;/ccts:RepresentationTerm&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the requiredBusinessClassificationScheme property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the requiredBusinessClassificationScheme property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getRequiredBusinessClassificationScheme().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link ClassificationSchemeType } * * */ public List<ClassificationSchemeType> getRequiredBusinessClassificationScheme() { if (requiredBusinessClassificationScheme == null) { requiredBusinessClassificationScheme = new ArrayList<ClassificationSchemeType>(); } return this.requiredBusinessClassificationScheme; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Technical_ Evaluation Criterion. * Evaluation Criterion * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A technical evaluation criterion required for an economic operator in a * tendering process. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTermQualifier&gt;Technical&lt;/ccts:PropertyTermQualifier&gt;&lt;ccts:PropertyTerm&gt;Evaluation Criterion&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Evaluation Criterion&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Evaluation Criterion&lt;/ccts:RepresentationTerm&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the technicalEvaluationCriterion property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. 
Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the technicalEvaluationCriterion property. * * <p> * For example, to add a new item, do as follows: * <pre> * getTechnicalEvaluationCriterion().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link EvaluationCriterionType } * * */ public List<EvaluationCriterionType> getTechnicalEvaluationCriterion() { if (technicalEvaluationCriterion == null) { technicalEvaluationCriterion = new ArrayList<EvaluationCriterionType>(); } return this.technicalEvaluationCriterion; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Financial_ Evaluation Criterion. * Evaluation Criterion * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A financial evaluation criterion required for an economic operator in a * tendering process. * &lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTermQualifier&gt;Financial&lt;/ccts:PropertyTermQualifier&gt;&lt;ccts:PropertyTerm&gt;Evaluation Criterion&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Evaluation Criterion&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Evaluation Criterion&lt;/ccts:RepresentationTerm&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the financialEvaluationCriterion property. 
* * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the financialEvaluationCriterion property. * * <p> * For example, to add a new item, do as follows: * <pre> * getFinancialEvaluationCriterion().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link EvaluationCriterionType } * * */ public List<EvaluationCriterionType> getFinancialEvaluationCriterion() { if (financialEvaluationCriterion == null) { financialEvaluationCriterion = new ArrayList<EvaluationCriterionType>(); } return this.financialEvaluationCriterion; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Specific_ Tenderer Requirement. 
* Tenderer Requirement * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A requirement to be met by a tenderer.&lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTermQualifier&gt;Specific&lt;/ccts:PropertyTermQualifier&gt;&lt;ccts:PropertyTerm&gt;Tenderer Requirement&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Tenderer Requirement&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Tenderer Requirement&lt;/ccts:RepresentationTerm&gt;&lt;ccts:Examples&gt;Preregistration in a Business Registry&lt;/ccts:Examples&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the specificTendererRequirement property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the specificTendererRequirement property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getSpecificTendererRequirement().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link TendererRequirementType } * * */ public List<TendererRequirementType> getSpecificTendererRequirement() { if (specificTendererRequirement == null) { specificTendererRequirement = new ArrayList<TendererRequirementType>(); } return this.specificTendererRequirement; } /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Component xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;&lt;ccts:ComponentType&gt;ASBIE&lt;/ccts:ComponentType&gt;&lt;ccts:DictionaryEntryName&gt;Tenderer Qualification Request. Economic Operator Role * &lt;/ccts:DictionaryEntryName&gt;&lt;ccts:Definition&gt;A class to describe the tenderer contracting role.&lt;/ccts:Definition&gt;&lt;ccts:Cardinality&gt;0..n&lt;/ccts:Cardinality&gt;&lt;ccts:ObjectClass&gt;Tenderer Qualification Request&lt;/ccts:ObjectClass&gt;&lt;ccts:PropertyTerm&gt;Economic Operator Role&lt;/ccts:PropertyTerm&gt;&lt;ccts:AssociatedObjectClass&gt;Economic Operator Role&lt;/ccts:AssociatedObjectClass&gt;&lt;ccts:RepresentationTerm&gt;Economic Operator Role&lt;/ccts:RepresentationTerm&gt;&lt;/ccts:Component&gt; * </pre> * Gets the value of the economicOperatorRole property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the economicOperatorRole property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getEconomicOperatorRole().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link EconomicOperatorRoleType } * * */ public List<EconomicOperatorRoleType> getEconomicOperatorRole() { if (economicOperatorRole == null) { economicOperatorRole = new ArrayList<EconomicOperatorRoleType>(); } return this.economicOperatorRole; } public TendererQualificationRequestType withCompanyLegalFormCode(CompanyLegalFormCodeType value) { setCompanyLegalFormCode(value); return this; } public TendererQualificationRequestType withCompanyLegalForm(CompanyLegalFormType value) { setCompanyLegalForm(value); return this; } public TendererQualificationRequestType withPersonalSituation(PersonalSituationType... values) { if (values!= null) { for (PersonalSituationType value: values) { getPersonalSituation().add(value); } } return this; } public TendererQualificationRequestType withPersonalSituation(Collection<PersonalSituationType> values) { if (values!= null) { getPersonalSituation().addAll(values); } return this; } public TendererQualificationRequestType withOperatingYearsQuantity(OperatingYearsQuantityType value) { setOperatingYearsQuantity(value); return this; } public TendererQualificationRequestType withEmployeeQuantity(EmployeeQuantityType value) { setEmployeeQuantity(value); return this; } public TendererQualificationRequestType withDescription(DescriptionType... values) { if (values!= null) { for (DescriptionType value: values) { getDescription().add(value); } } return this; } public TendererQualificationRequestType withDescription(Collection<DescriptionType> values) { if (values!= null) { getDescription().addAll(values); } return this; } public TendererQualificationRequestType withRequiredBusinessClassificationScheme(ClassificationSchemeType... 
values) { if (values!= null) { for (ClassificationSchemeType value: values) { getRequiredBusinessClassificationScheme().add(value); } } return this; } public TendererQualificationRequestType withRequiredBusinessClassificationScheme(Collection<ClassificationSchemeType> values) { if (values!= null) { getRequiredBusinessClassificationScheme().addAll(values); } return this; } public TendererQualificationRequestType withTechnicalEvaluationCriterion(EvaluationCriterionType... values) { if (values!= null) { for (EvaluationCriterionType value: values) { getTechnicalEvaluationCriterion().add(value); } } return this; } public TendererQualificationRequestType withTechnicalEvaluationCriterion(Collection<EvaluationCriterionType> values) { if (values!= null) { getTechnicalEvaluationCriterion().addAll(values); } return this; } public TendererQualificationRequestType withFinancialEvaluationCriterion(EvaluationCriterionType... values) { if (values!= null) { for (EvaluationCriterionType value: values) { getFinancialEvaluationCriterion().add(value); } } return this; } public TendererQualificationRequestType withFinancialEvaluationCriterion(Collection<EvaluationCriterionType> values) { if (values!= null) { getFinancialEvaluationCriterion().addAll(values); } return this; } public TendererQualificationRequestType withSpecificTendererRequirement(TendererRequirementType... values) { if (values!= null) { for (TendererRequirementType value: values) { getSpecificTendererRequirement().add(value); } } return this; } public TendererQualificationRequestType withSpecificTendererRequirement(Collection<TendererRequirementType> values) { if (values!= null) { getSpecificTendererRequirement().addAll(values); } return this; } public TendererQualificationRequestType withEconomicOperatorRole(EconomicOperatorRoleType... 
values) { if (values!= null) { for (EconomicOperatorRoleType value: values) { getEconomicOperatorRole().add(value); } } return this; } public TendererQualificationRequestType withEconomicOperatorRole(Collection<EconomicOperatorRoleType> values) { if (values!= null) { getEconomicOperatorRole().addAll(values); } return this; } @Override public String toString() { return Objects.toStringHelper(this).add("companyLegalFormCode", companyLegalFormCode).add("companyLegalForm", companyLegalForm).add("personalSituation", personalSituation).add("operatingYearsQuantity", operatingYearsQuantity).add("employeeQuantity", employeeQuantity).add("description", description).add("requiredBusinessClassificationScheme", requiredBusinessClassificationScheme).add("technicalEvaluationCriterion", technicalEvaluationCriterion).add("financialEvaluationCriterion", financialEvaluationCriterion).add("specificTendererRequirement", specificTendererRequirement).add("economicOperatorRole", economicOperatorRole).toString(); } @Override public int hashCode() { return Objects.hashCode(companyLegalFormCode, companyLegalForm, personalSituation, operatingYearsQuantity, employeeQuantity, description, requiredBusinessClassificationScheme, technicalEvaluationCriterion, financialEvaluationCriterion, specificTendererRequirement, economicOperatorRole); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (other == null) { return false; } if (getClass()!= other.getClass()) { return false; } final TendererQualificationRequestType o = ((TendererQualificationRequestType) other); return ((((((((((Objects.equal(companyLegalFormCode, o.companyLegalFormCode)&&Objects.equal(companyLegalForm, o.companyLegalForm))&&Objects.equal(personalSituation, o.personalSituation))&&Objects.equal(operatingYearsQuantity, o.operatingYearsQuantity))&&Objects.equal(employeeQuantity, o.employeeQuantity))&&Objects.equal(description, o.description))&&Objects.equal(requiredBusinessClassificationScheme, 
o.requiredBusinessClassificationScheme))&&Objects.equal(technicalEvaluationCriterion, o.technicalEvaluationCriterion))&&Objects.equal(financialEvaluationCriterion, o.financialEvaluationCriterion))&&Objects.equal(specificTendererRequirement, o.specificTendererRequirement))&&Objects.equal(economicOperatorRole, o.economicOperatorRole)); } }
package com.tgb.media.helper; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.animation.TimeInterpolator; import android.animation.ValueAnimator; import android.content.Context; import android.content.res.TypedArray; import android.os.Build; import android.util.AttributeSet; import android.view.ViewGroup; import android.view.animation.AccelerateDecelerateInterpolator; import android.widget.TextView; import com.tgb.media.R; import java.lang.reflect.Field; /** * Copyright (C) 2016 Cliff Ophalvens (Blogc.at) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*
 * @author Cliff Ophalvens (Blogc.at)
 */
public class ExpandableTextView extends TextView {
    // copy of the hidden TextView.LINES constant: when mMaxMode equals this
    // value the maximum size is expressed in lines (rather than pixels)
    private static final int MAXMODE_LINES = 1;

    private OnExpandListener onExpandListener;
    private TimeInterpolator expandInterpolator;
    private TimeInterpolator collapseInterpolator;

    private final int maxLines;        // original maxLines value, restored on collapse (-1 = no limit)
    private long animationDuration;    // expand/collapse animation length in milliseconds
    private boolean animating;         // true while an expand/collapse animation is running
    private boolean expanded;          // current logical state of the view
    private int collapsedHeight;       // measured height in the collapsed state

    public ExpandableTextView(final Context context) {
        this(context, null);
    }

    public ExpandableTextView(final Context context, final AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public ExpandableTextView(final Context context, final AttributeSet attrs, final int defStyle) {
        super(context, attrs, defStyle);

        // read the animation duration from XML attributes (default: 500 ms)
        final TypedArray attributes = context.obtainStyledAttributes(attrs,
                R.styleable.ExpandableTextView, defStyle, 0);
        this.animationDuration =
                attributes.getInt(R.styleable.ExpandableTextView_animation_duration, 500);
        attributes.recycle();

        // keep the original value of maxLines so collapse() can restore it
        this.maxLines = this.getMaxLines();

        // create default interpolators
        this.expandInterpolator = new AccelerateDecelerateInterpolator();
        this.collapseInterpolator = new AccelerateDecelerateInterpolator();
    }

    /**
     * Returns the configured maximum number of lines, or -1 when no line
     * limit is set. Before JELLY_BEAN there is no public getter, so the
     * hidden {@code mMaxMode}/{@code mMaximum} fields are read via reflection.
     *
     * @return the maximum line count, or -1 if unlimited or unreadable.
     */
    @Override
    public int getMaxLines() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
            return super.getMaxLines();
        }

        try {
            // BUGFIX: mMaxMode and mMaximum are private fields, so they must be
            // looked up with getDeclaredField(); getField() only resolves public
            // members and always threw NoSuchFieldException here, which made this
            // method return -1 on pre-JELLY_BEAN devices and thereby disabled
            // expand()/collapse() entirely (both guard on maxLines >= 0).
            final Field mMaxMode = TextView.class.getDeclaredField("mMaxMode");
            mMaxMode.setAccessible(true);
            final Field mMaximum = TextView.class.getDeclaredField("mMaximum");
            mMaximum.setAccessible(true);

            final int mMaxModeValue = (int) mMaxMode.get(this);
            final int mMaximumValue = (int) mMaximum.get(this);

            return mMaxModeValue == MAXMODE_LINES ? mMaximumValue : -1;
        } catch (final Exception e) {
            // hidden fields renamed or missing on this ROM: report "no limit"
            return -1;
        }
    }

    /**
     * Toggle the expanded state of this {@link ExpandableTextView}.
     * @return true if toggled, false otherwise.
     */
    public boolean toggle() {
        return this.expanded ? this.collapse() : this.expand();
    }

    /**
     * Expand this {@link ExpandableTextView}.
     * @return true if the expand animation was started; false if the view is
     *         already expanded, currently animating, or has no line limit.
     */
    public boolean expand() {
        if (!this.expanded && !this.animating && this.maxLines >= 0) {
            this.animating = true;

            // notify listener
            if (this.onExpandListener != null) {
                this.onExpandListener.onExpand(this);
            }

            // measure and remember the collapsed height
            this.measure(
                    MeasureSpec.makeMeasureSpec(this.getMeasuredWidth(), MeasureSpec.EXACTLY),
                    MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
            this.collapsedHeight = this.getMeasuredHeight();

            // lift the line limit so the fully expanded height can be measured
            this.setMaxLines(Integer.MAX_VALUE);

            // measure the expanded height
            this.measure(
                    MeasureSpec.makeMeasureSpec(this.getMeasuredWidth(), MeasureSpec.EXACTLY),
                    MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
            final int expandedHeight = this.getMeasuredHeight();

            // animate the layout height from collapsed to expanded
            final ValueAnimator valueAnimator =
                    ValueAnimator.ofInt(this.collapsedHeight, expandedHeight);
            valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                @Override
                public void onAnimationUpdate(final ValueAnimator animation) {
                    final ViewGroup.LayoutParams layoutParams =
                            ExpandableTextView.this.getLayoutParams();
                    layoutParams.height = (int) animation.getAnimatedValue();
                    ExpandableTextView.this.setLayoutParams(layoutParams);
                }
            });
            valueAnimator.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(final Animator animation) {
                    // once fully expanded, fall back to WRAP_CONTENT: the fixed
                    // pixel height computed by this ValueAnimator would be wrong
                    // after a device rotation
                    final ViewGroup.LayoutParams layoutParams =
                            ExpandableTextView.this.getLayoutParams();
                    layoutParams.height = ViewGroup.LayoutParams.WRAP_CONTENT;
                    ExpandableTextView.this.setLayoutParams(layoutParams);

                    // keep track of current status
                    ExpandableTextView.this.expanded = true;
                    ExpandableTextView.this.animating = false;
                }
            });

            // set interpolator and start the animation
            valueAnimator.setInterpolator(this.expandInterpolator);
            valueAnimator
                    .setDuration(this.animationDuration)
                    .start();

            return true;
        }
        return false;
    }

    /**
     * Collapse this {@link ExpandableTextView}.
     * @return true if the collapse animation was started; false if the view is
     *         already collapsed, currently animating, or has no line limit.
     */
    public boolean collapse() {
        if (this.expanded && !this.animating && this.maxLines >= 0) {
            this.animating = true;

            // notify listener
            if (this.onExpandListener != null) {
                this.onExpandListener.onCollapse(this);
            }

            // current (expanded) height
            final int expandedHeight = this.getMeasuredHeight();

            // animate the layout height from expanded back to the height
            // remembered by expand()
            final ValueAnimator valueAnimator =
                    ValueAnimator.ofInt(expandedHeight, this.collapsedHeight);
            valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
                @Override
                public void onAnimationUpdate(final ValueAnimator animation) {
                    final ViewGroup.LayoutParams layoutParams =
                            ExpandableTextView.this.getLayoutParams();
                    layoutParams.height = (int) animation.getAnimatedValue();
                    ExpandableTextView.this.setLayoutParams(layoutParams);
                }
            });
            valueAnimator.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(final Animator animation) {
                    // restore the original line limit
                    ExpandableTextView.this.setMaxLines(ExpandableTextView.this.maxLines);

                    // once fully collapsed, fall back to WRAP_CONTENT: the fixed
                    // pixel height computed by this ValueAnimator would be wrong
                    // after a device rotation
                    final ViewGroup.LayoutParams layoutParams =
                            ExpandableTextView.this.getLayoutParams();
                    layoutParams.height = ViewGroup.LayoutParams.WRAP_CONTENT;
                    ExpandableTextView.this.setLayoutParams(layoutParams);

                    // keep track of current status
                    ExpandableTextView.this.expanded = false;
                    ExpandableTextView.this.animating = false;
                }
            });

            // set interpolator and start the animation
            valueAnimator.setInterpolator(this.collapseInterpolator);
            valueAnimator
                    .setDuration(this.animationDuration)
                    .start();

            return true;
        }
        return false;
    }

    /**
     * Sets the duration of the expand / collapse animation.
     * @param animationDuration duration in milliseconds.
     */
    public void setAnimationDuration(final long animationDuration) {
        this.animationDuration = animationDuration;
    }

    /**
     * Sets a listener which receives updates about this {@link ExpandableTextView}.
     * @param onExpandListener the listener.
     */
    public void setOnExpandListener(final OnExpandListener onExpandListener) {
        this.onExpandListener = onExpandListener;
    }

    /**
     * Returns the {@link OnExpandListener}.
     * @return the listener, or null if none was set.
     */
    public OnExpandListener getOnExpandListener() {
        return this.onExpandListener;
    }

    /**
     * Sets a {@link TimeInterpolator} for both expanding and collapsing.
     * @param interpolator the interpolator
     */
    public void setInterpolator(final TimeInterpolator interpolator) {
        this.expandInterpolator = interpolator;
        this.collapseInterpolator = interpolator;
    }

    /**
     * Sets a {@link TimeInterpolator} for expanding.
     * @param expandInterpolator the interpolator
     */
    public void setExpandInterpolator(final TimeInterpolator expandInterpolator) {
        this.expandInterpolator = expandInterpolator;
    }

    /**
     * Returns the current {@link TimeInterpolator} for expanding.
     * @return the current interpolator; an
     *         {@link AccelerateDecelerateInterpolator} by default.
     */
    public TimeInterpolator getExpandInterpolator() {
        return this.expandInterpolator;
    }

    /**
     * Sets a {@link TimeInterpolator} for collapsing.
     * @param collapseInterpolator the interpolator
     */
    public void setCollapseInterpolator(final TimeInterpolator collapseInterpolator) {
        this.collapseInterpolator = collapseInterpolator;
    }

    /**
     * Returns the current {@link TimeInterpolator} for collapsing.
     * @return the current interpolator; an
     *         {@link AccelerateDecelerateInterpolator} by default.
     */
    public TimeInterpolator getCollapseInterpolator() {
        return this.collapseInterpolator;
    }

    /**
     * Is this {@link ExpandableTextView} expanded or not?
     * @return true if expanded, false if collapsed.
     */
    public boolean isExpanded() {
        return this.expanded;
    }

    /**
     * Interface definition for a callback to be invoked when
     * a {@link ExpandableTextView} is expanded or collapsed.
     */
    public interface OnExpandListener {
        /**
         * The {@link ExpandableTextView} is being expanded.
         * @param view the textview
         */
        void onExpand(ExpandableTextView view);

        /**
         * The {@link ExpandableTextView} is being collapsed.
         * @param view the textview
         */
        void onCollapse(ExpandableTextView view);
    }
}
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.db.entitymanager;

import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.DELETED_MERGED;
import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.DELETED_PERSISTENT;
import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.DELETED_TRANSIENT;
import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.MERGED;
import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.PERSISTENT;
import static org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState.TRANSIENT;
import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.DELETE;
import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.DELETE_BULK;
import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.INSERT;
import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.UPDATE;
import static org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType.UPDATE_BULK;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import org.camunda.bpm.engine.impl.AbstractReport;
import org.camunda.bpm.engine.impl.DeploymentQueryImpl;
import org.camunda.bpm.engine.impl.ExecutionQueryImpl;
import org.camunda.bpm.engine.impl.GroupQueryImpl;
import org.camunda.bpm.engine.impl.HistoricActivityInstanceQueryImpl;
import org.camunda.bpm.engine.impl.HistoricDetailQueryImpl;
import org.camunda.bpm.engine.impl.HistoricJobLogQueryImpl;
import org.camunda.bpm.engine.impl.HistoricProcessInstanceQueryImpl;
import org.camunda.bpm.engine.impl.HistoricTaskInstanceQueryImpl;
import org.camunda.bpm.engine.impl.HistoricVariableInstanceQueryImpl;
import org.camunda.bpm.engine.impl.JobQueryImpl;
import org.camunda.bpm.engine.impl.Page;
import org.camunda.bpm.engine.impl.ProcessDefinitionQueryImpl;
import org.camunda.bpm.engine.impl.ProcessInstanceQueryImpl;
import org.camunda.bpm.engine.impl.TaskQueryImpl;
import org.camunda.bpm.engine.impl.UserQueryImpl;
import org.camunda.bpm.engine.impl.ProcessEngineLogger;
import org.camunda.bpm.engine.impl.cfg.IdGenerator;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.cmmn.entity.repository.CaseDefinitionQueryImpl;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.db.DbEntity;
import org.camunda.bpm.engine.impl.db.DbEntityLifecycleAware;
import org.camunda.bpm.engine.impl.db.EntityLoadListener;
import org.camunda.bpm.engine.impl.db.ListQueryParameterObject;
import org.camunda.bpm.engine.impl.db.PersistenceSession;
import org.camunda.bpm.engine.impl.db.EnginePersistenceLogger;
import org.camunda.bpm.engine.impl.db.entitymanager.cache.CachedDbEntity;
import org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityCache;
import org.camunda.bpm.engine.impl.db.entitymanager.cache.DbEntityState;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbBulkOperation;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbEntityOperation;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperation;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationManager;
import org.camunda.bpm.engine.impl.db.entitymanager.operation.DbOperationType;
import org.camunda.bpm.engine.impl.identity.db.DbGroupQueryImpl;
import org.camunda.bpm.engine.impl.identity.db.DbUserQueryImpl;
import org.camunda.bpm.engine.impl.interceptor.Session;
import org.camunda.bpm.engine.impl.jobexecutor.JobExecutorContext;
import org.camunda.bpm.engine.impl.util.EnsureUtil;

/**
 * Session-scoped entity manager that mediates all database access:
 * it caches loaded {@link DbEntity} instances in a {@link DbEntityCache},
 * tracks their lifecycle state, and on {@link #flush()} translates state
 * changes into {@link DbOperation}s which are executed through the
 * {@link PersistenceSession}.
 *
 * @author Daniel Meyer
 */
@SuppressWarnings({ "rawtypes" })
public class DbEntityManager implements Session, EntityLoadListener {

  protected static final EnginePersistenceLogger LOG = ProcessEngineLogger.PERSISTENCE_LOGGER;

  // listeners that may absorb optimistic-locking failures instead of failing the flush
  protected List<OptimisticLockingListener> optimisticLockingListeners;
  protected IdGenerator idGenerator;
  protected DbEntityCache dbEntityCache;
  protected DbOperationManager dbOperationManager;
  protected PersistenceSession persistenceSession;

  /**
   * Creates a new manager bound to the given persistence session.
   * Registers itself as {@link EntityLoadListener} so entities loaded by the
   * session are placed in the cache via {@link #onEntityLoaded(DbEntity)}.
   *
   * @param idGenerator used to assign ids to transient entities on {@link #insert(DbEntity)}
   * @param persistenceSession the underlying session; may be null (e.g. in tests)
   */
  public DbEntityManager(IdGenerator idGenerator, PersistenceSession persistenceSession) {
    this.idGenerator = idGenerator;
    this.persistenceSession = persistenceSession;
    if (persistenceSession != null) {
      this.persistenceSession.addEntityLoadListener(this);
    }
    initializeEntityCache();
    initializeOperationManager();
  }

  protected void initializeOperationManager() {
    dbOperationManager = new DbOperationManager();
  }

  /**
   * Initializes the entity cache. When cache reuse is enabled and a job-executor
   * context is present, the cache is shared across commands of the same job
   * execution; otherwise a fresh cache is created for this session.
   */
  protected void initializeEntityCache() {

    final JobExecutorContext jobExecutorContext = Context.getJobExecutorContext();
    final ProcessEngineConfigurationImpl processEngineConfiguration = Context.getProcessEngineConfiguration();

    if(processEngineConfiguration != null
        && processEngineConfiguration.isDbEntityCacheReuseEnabled()
        && jobExecutorContext != null) {

      dbEntityCache = jobExecutorContext.getEntityCache();
      if(dbEntityCache == null) {
        dbEntityCache = new DbEntityCache(processEngineConfiguration.getDbEntityCacheKeyMapping());
        jobExecutorContext.setEntityCache(dbEntityCache);
      }

    } else {

      if (processEngineConfiguration != null) {
        dbEntityCache = new DbEntityCache(processEngineConfiguration.getDbEntityCacheKeyMapping());
      } else {
        dbEntityCache = new DbEntityCache();
      }
    }
  }

  // selects /////////////////////////////////////////////////

  /** Selects all results of the statement without paging. */
  public List selectList(String statement) {
    return selectList(statement, null, 0, Integer.MAX_VALUE);
  }

  /** Selects all results of the statement for the given parameter without paging. */
  public List selectList(String statement, Object parameter) {
    return selectList(statement, parameter, 0, Integer.MAX_VALUE);
  }

  /** Selects results of the statement; a null page means "no paging". */
  public List selectList(String statement, Object parameter, Page page) {
    if(page!=null) {
      return selectList(statement, parameter, page.getFirstResult(), page.getMaxResults());
    } else {
      return selectList(statement, parameter, 0, Integer.MAX_VALUE);
    }
  }

  /** Selects all results for a report query (passed through as raw parameter). */
  public List selectList(String statement, AbstractReport reportQuery) {
    return selectListWithRawParameter(statement, reportQuery, 0, Integer.MAX_VALUE);
  }

  /**
   * NOTE(review): the page argument is ignored here; paging is taken from the
   * firstResult/maxResults already carried by the parameter object — confirm
   * callers expect that.
   */
  public List selectList(String statement, ListQueryParameterObject parameter, Page page) {
    return selectList(statement, parameter);
  }

  /** Wraps the raw parameter and paging bounds in a {@link ListQueryParameterObject}. */
  public List selectList(String statement, Object parameter, int firstResult, int maxResults) {
    return selectList(statement, new ListQueryParameterObject(parameter, firstResult, maxResults));
  }

  public List selectList(String statement, ListQueryParameterObject parameter) {
    return selectListWithRawParameter(statement, parameter, parameter.getFirstResult(), parameter.getMaxResults());
  }

  /**
   * Executes the statement and routes loaded {@link DbEntity} results through the
   * cache (already-cached instances win over freshly loaded copies).
   * A paging bound of -1 short-circuits to an empty result without hitting the db.
   */
  @SuppressWarnings("unchecked")
  public List selectListWithRawParameter(String statement, Object parameter, int firstResult, int maxResults) {
    if(firstResult == -1 ||  maxResults==-1) {
      return Collections.EMPTY_LIST;
    }
    List loadedObjects = persistenceSession.selectList(statement, parameter);
    return filterLoadedObjects(loadedObjects);
  }

  /**
   * Selects a single result; if it is a {@link DbEntity}, the cached instance is
   * returned instead of the freshly loaded one when present.
   */
  public Object selectOne(String statement, Object parameter) {
    Object result = persistenceSession.selectOne(statement, parameter);
    if (result instanceof DbEntity) {
      DbEntity loadedObject = (DbEntity) result;
      result = cacheFilter(loadedObject);
    }
    return result;
  }

  /**
   * Executes the statement and interprets the result as a boolean.
   *
   * NOTE(review): the list is declared as {@code List<String>} but the check is
   * {@code result.contains(1)} (an autoboxed Integer). Due to type erasure this
   * only matches if the statement actually returns numeric elements at runtime —
   * verify against the mapped statement; with String elements this is always false.
   */
  @SuppressWarnings("unchecked")
  public boolean selectBoolean(String statement, Object parameter) {
    List<String> result = (List<String>) persistenceSession.selectList(statement, parameter);
    if(result != null) {
      return result.contains(1);
    }
    return false;
  }

  /**
   * Loads an entity by id, consulting the cache first. A database hit is put into
   * the cache via the {@link #onEntityLoaded(DbEntity)} callback, not here.
   */
  public <T extends DbEntity> T selectById(Class<T> entityClass, String id) {
    T persistentObject = dbEntityCache.get(entityClass, id);
    if (persistentObject!=null) {
      return persistentObject;
    }

    persistentObject = persistenceSession.selectById(entityClass, id);
    if (persistentObject==null) {
      return null;
    }
    // don't have to put object into the cache now. See onEntityLoaded() callback
    return persistentObject;
  }

  /** Returns the cached entity of the given type/id, or null if not cached. */
  public <T extends DbEntity> T getCachedEntity(Class<T> type, String id) {
    return dbEntityCache.get(type, id);
  }

  /** Returns all cached entities of the given type. */
  public <T extends DbEntity> List<T> getCachedEntitiesByType(Class<T> type) {
    return dbEntityCache.getEntitiesByType(type);
  }

  /**
   * Replaces each loaded {@link DbEntity} with its cached counterpart (if any).
   * Non-entity result lists are returned unchanged; the check inspects only the
   * first element, assuming a homogeneous result list.
   */
  protected List filterLoadedObjects(List<Object> loadedObjects) {
    if (loadedObjects.isEmpty() || loadedObjects.get(0) == null) {
      return loadedObjects;
    }
    if (! (DbEntity.class.isAssignableFrom(loadedObjects.get(0).getClass()))) {
      return loadedObjects;
    }
    List<DbEntity> filteredObjects = new ArrayList<DbEntity>(loadedObjects.size());
    for (Object loadedObject: loadedObjects) {
      DbEntity cachedPersistentObject = cacheFilter((DbEntity) loadedObject);
      filteredObjects.add(cachedPersistentObject);
    }
    return filteredObjects;
  }

  /**
   * Returns the object in the cache. If this object was loaded before,
   * then the original (cached) object is returned instead of the given one.
   */
  protected DbEntity cacheFilter(DbEntity persistentObject) {
    DbEntity cachedPersistentObject = dbEntityCache.get(persistentObject.getClass(), persistentObject.getId());
    if (cachedPersistentObject!=null) {
      return cachedPersistentObject;
    }
    else {
      return persistentObject;
    }
  }

  /**
   * Callback from the persistence session whenever it loads an entity from the
   * database; puts it into the cache (first load wins) and fires the entity's
   * postLoad() lifecycle hook.
   */
  public void onEntityLoaded(DbEntity entity) {
    // we get a callback when the persistence session loads an object from the database
    DbEntity cachedPersistentObject = dbEntityCache.get(entity.getClass(), entity.getId());
    if(cachedPersistentObject == null) {
      // only put into the cache if not already present
      dbEntityCache.putPersistent(entity);

      // invoke postLoad() lifecycle method
      if (entity instanceof DbEntityLifecycleAware) {
        DbEntityLifecycleAware lifecycleAware = (DbEntityLifecycleAware) entity;
        lifecycleAware.postLoad();
      }
    }
  }

  public void lock(String statement) {
    lock(statement, null);
  }

  /** Executes a locking statement directly on the persistence session. */
  public void lock(String statement, Object parameter) {
    persistenceSession.lock(statement, parameter);
  }

  /**
   * Returns true if the entity has unflushed changes: either it differs from its
   * cached copy, or it was merged into the session.
   */
  public boolean isDirty(DbEntity dbEntity) {
    CachedDbEntity cachedEntity = dbEntityCache.getCachedEntity(dbEntity);
    if(cachedEntity == null) {
      return false;
    }
    else {
      return cachedEntity.isDirty() || cachedEntity.getEntityState() == DbEntityState.MERGED;
    }
  }

  /**
   * Flushes all pending changes: first converts cache state into db operations,
   * then executes the operations in dependency order.
   */
  public void flush() {

    // flush the entity cache which inserts operations to the db operation manager
    flushEntityCache();

    // flush the db operation manager
    flushDbOperationManager();
  }

  /**
   * Executes the totally ordered operation list produced by the operation manager.
   * Execution exceptions abort the flush; operations that report failure (e.g.
   * optimistic-locking conflicts) are routed to the registered listeners.
   */
  protected void flushDbOperationManager() {

    // obtain totally ordered operation list from operation manager
    List<DbOperation> operationsToFlush = dbOperationManager.calculateFlush();
    LOG.databaseFlushSummary(operationsToFlush);

    // execute the flush
    for (DbOperation dbOperation : operationsToFlush) {
      try {
        persistenceSession.executeDbOperation(dbOperation);
      } catch(Exception e) {
        throw LOG.flushDbOperationException(operationsToFlush, dbOperation, e);
      }
      if(dbOperation.isFailed()) {
        handleOptimisticLockingException(dbOperation);
      }
    }
  }

  /** Flushes a single entity (if cached) and immediately executes the operations. */
  public void flushEntity(DbEntity entity) {
    CachedDbEntity cachedEntity = dbEntityCache.getCachedEntity(entity);
    if (cachedEntity != null) {
      flushCachedEntity(cachedEntity);
    }

    flushDbOperationManager();
  }

  /**
   * Gives registered {@link OptimisticLockingListener}s (filtered by entity type)
   * a chance to handle a failed operation; rethrows as a concurrent-update
   * exception if no listener claims it.
   */
  protected void handleOptimisticLockingException(DbOperation dbOperation) {
    boolean isHandled = false;

    if(optimisticLockingListeners != null) {
      for (OptimisticLockingListener optimisticLockingListener : optimisticLockingListeners) {
        if(optimisticLockingListener.getEntityType() == null
            || optimisticLockingListener.getEntityType().isAssignableFrom(dbOperation.getEntityType())) {
          optimisticLockingListener.failedOperation(dbOperation);
          isHandled = true;
        }
      }
    }

    if(!isHandled) {
      throw LOG.concurrentUpdateDbEntityException(dbOperation);
    }
  }

  /**
   * Flushes the entity cache:
   * Depending on the entity state, the required {@link DbOperation} is performed and the cache is updated.
   */
  protected void flushEntityCache() {
    List<CachedDbEntity> cachedEntities = dbEntityCache.getCachedEntities();
    for (CachedDbEntity cachedDbEntity : cachedEntities) {
      flushCachedEntity(cachedDbEntity);
    }

    // log cache state after flush
    LOG.flushedCacheState(dbEntityCache.getCachedEntities());
  }

  /**
   * Maps a single cached entity's state to a db operation:
   * TRANSIENT -> INSERT, dirty PERSISTENT / MERGED -> UPDATE,
   * DELETED_TRANSIENT -> drop from cache only,
   * DELETED_PERSISTENT / DELETED_MERGED -> DELETE + drop from cache.
   * Entities ending up PERSISTENT get a fresh dirty-tracking copy.
   */
  protected void flushCachedEntity(CachedDbEntity cachedDbEntity) {

    if(cachedDbEntity.getEntityState() == TRANSIENT) {
      // latest state of references in cache is relevant when determining insertion order
      cachedDbEntity.determineEntityReferences();
      // perform INSERT
      performEntityOperation(cachedDbEntity, INSERT);
      // mark PERSISTENT
      cachedDbEntity.setEntityState(PERSISTENT);

    } else if(cachedDbEntity.getEntityState() == PERSISTENT && cachedDbEntity.isDirty()) {
      // object is dirty -> perform UPDATE
      performEntityOperation(cachedDbEntity, UPDATE);

    } else if(cachedDbEntity.getEntityState() == MERGED) {
      // perform UPDATE
      performEntityOperation(cachedDbEntity, UPDATE);
      // mark PERSISTENT
      cachedDbEntity.setEntityState(PERSISTENT);

    } else if(cachedDbEntity.getEntityState() == DELETED_TRANSIENT) {
      // remove from cache
      dbEntityCache.remove(cachedDbEntity);

    } else if(cachedDbEntity.getEntityState() == DELETED_PERSISTENT
        || cachedDbEntity.getEntityState() == DELETED_MERGED) {
      // perform DELETE
      performEntityOperation(cachedDbEntity, DELETE);
      // remove from cache
      dbEntityCache.remove(cachedDbEntity);

    }

    // if object is PERSISTENT after flush
    if(cachedDbEntity.getEntityState() == PERSISTENT) {
      // make a new copy
      cachedDbEntity.makeCopy();
      // update cached references
      cachedDbEntity.determineEntityReferences();
    }
  }

  /**
   * Schedules an entity for insertion: assigns an id if missing, validates it,
   * and registers the entity as TRANSIENT in the cache (actual INSERT happens on flush).
   */
  public void insert(DbEntity dbEntity) {
    // generate Id if not present
    ensureHasId(dbEntity);
    validateId(dbEntity);

    // put into cache
    dbEntityCache.putTransient(dbEntity);

  }

  /** Registers a detached entity as MERGED; an UPDATE is performed on flush. */
  public void merge(DbEntity dbEntity) {

    if(dbEntity.getId() == null) {
      throw LOG.mergeDbEntityException(dbEntity);
    }

    // NOTE: a proper implementation of merge() would fetch the entity from the database
    // and merge the state changes. For now, we simply always perform an update.
    // Supposedly, the "proper" implementation would reduce the number of situations where
    // optimistic locking results in a conflict.

    dbEntityCache.putMerged(dbEntity);
  }

  /** Forces an UPDATE on the next flush even if the cached copy looks unchanged. */
  public void forceUpdate(DbEntity entity) {
    CachedDbEntity cachedEntity = dbEntityCache.getCachedEntity(entity);
    if(cachedEntity != null && cachedEntity.getEntityState() == PERSISTENT) {
      cachedEntity.forceSetDirty();
    }

  }

  /** Marks the entity as deleted in the cache (DELETE is performed on flush). */
  public void delete(DbEntity dbEntity) {
    dbEntityCache.setDeleted(dbEntity);
  }

  /** Schedules a bulk UPDATE statement. */
  public void update(Class<? extends DbEntity> entityType, String statement, Object parameter) {
    performBulkOperation(entityType, statement, parameter, UPDATE_BULK);
  }

  /** Schedules a bulk DELETE statement. */
  public void delete(Class<? extends DbEntity> entityType, String statement, Object parameter) {
    performBulkOperation(entityType, statement, parameter, DELETE_BULK);
  }

  /**
   * Creates and schedules a bulk operation; execution is deferred to the next flush.
   */
  protected DbBulkOperation performBulkOperation(Class<? extends DbEntity> entityType, String statement, Object parameter, DbOperationType operationType) {
    // create operation
    DbBulkOperation bulkOperation = new DbBulkOperation();

    // configure operation
    bulkOperation.setOperationType(operationType);
    bulkOperation.setEntityType(entityType);
    bulkOperation.setStatement(statement);
    bulkOperation.setParameter(parameter);

    // schedule operation
    dbOperationManager.addOperation(bulkOperation);
    return bulkOperation;
  }

  /** Creates and schedules a single-entity operation of the given type. */
  protected void performEntityOperation(CachedDbEntity cachedDbEntity, DbOperationType type) {
    DbEntityOperation dbOperation = new DbEntityOperation();
    dbOperation.setEntity(cachedDbEntity.getEntity());
    dbOperation.setFlushRelevantEntityReferences(cachedDbEntity.getFlushRelevantEntityReferences());
    dbOperation.setOperationType(type);
    dbOperationManager.addOperation(dbOperation);
  }

  public void close() {

  }

  /** Returns true if the entity is marked deleted in the cache. */
  public boolean isDeleted(DbEntity object) {
    return dbEntityCache.isDeleted(object);
  }

  /** Assigns a generated id to the entity if it does not have one yet. */
  protected void ensureHasId(DbEntity dbEntity) {
    if(dbEntity.getId() == null) {
      String nextId = idGenerator.getNextId();
      dbEntity.setId(nextId);
    }
  }

  protected void validateId(DbEntity dbEntity) {
    EnsureUtil.ensureValidIndividualResourceId("Entity " + dbEntity + " has an invalid id", dbEntity.getId());
  }

  /** Returns a copy of the list with entities that are marked deleted removed. */
  public <T extends DbEntity> List<T> pruneDeletedEntities(List<T> listToPrune) {
    ArrayList<T> prunedList = new ArrayList<T>();
    for (T potentiallyDeleted : listToPrune) {
      if(!isDeleted(potentiallyDeleted)) {
        prunedList.add(potentiallyDeleted);
      }
    }
    return prunedList;
  }

  public boolean contains(DbEntity dbEntity) {
    return dbEntityCache.contains(dbEntity);
  }

  // getters / setters /////////////////////////////////

  public DbOperationManager getDbOperationManager() {
    return dbOperationManager;
  }

  public void setDbOperationManager(DbOperationManager operationManager) {
    this.dbOperationManager = operationManager;
  }

  public DbEntityCache getDbEntityCache() {
    return dbEntityCache;
  }

  public void setDbEntityCache(DbEntityCache dbEntityCache) {
    this.dbEntityCache = dbEntityCache;
  }

  // query factory methods ////////////////////////////////////////////////////

  public DeploymentQueryImpl createDeploymentQuery() {
    return new DeploymentQueryImpl();
  }

  public ProcessDefinitionQueryImpl createProcessDefinitionQuery() {
    return new ProcessDefinitionQueryImpl();
  }

  public CaseDefinitionQueryImpl createCaseDefinitionQuery() {
    return new CaseDefinitionQueryImpl();
  }

  public ProcessInstanceQueryImpl createProcessInstanceQuery() {
    return new ProcessInstanceQueryImpl();
  }

  public ExecutionQueryImpl createExecutionQuery() {
    return new ExecutionQueryImpl();
  }

  public TaskQueryImpl createTaskQuery() {
    return new TaskQueryImpl();
  }

  public JobQueryImpl createJobQuery() {
    return new JobQueryImpl();
  }

  public HistoricProcessInstanceQueryImpl createHistoricProcessInstanceQuery() {
    return new HistoricProcessInstanceQueryImpl();
  }

  public HistoricActivityInstanceQueryImpl createHistoricActivityInstanceQuery() {
    return new HistoricActivityInstanceQueryImpl();
  }

  public HistoricTaskInstanceQueryImpl createHistoricTaskInstanceQuery() {
    return new HistoricTaskInstanceQueryImpl();
  }

  public HistoricDetailQueryImpl createHistoricDetailQuery() {
    return new HistoricDetailQueryImpl();
  }

  public HistoricVariableInstanceQueryImpl createHistoricVariableInstanceQuery() {
    return new HistoricVariableInstanceQueryImpl();
  }

  public HistoricJobLogQueryImpl createHistoricJobLogQuery() {
    return new HistoricJobLogQueryImpl();
  }

  public UserQueryImpl createUserQuery() {
    return new DbUserQueryImpl();
  }

  public GroupQueryImpl createGroupQuery() {
    return new DbGroupQueryImpl();
  }

  /** Registers a listener that may absorb optimistic-locking failures during flush. */
  public void registerOptimisticLockingListener(OptimisticLockingListener optimisticLockingListener) {
    if(optimisticLockingListeners == null) {
      optimisticLockingListeners = new ArrayList<OptimisticLockingListener>();
    }
    optimisticLockingListeners.add(optimisticLockingListener);
  }

  public List<String> getTableNamesPresentInDatabase() {
    return persistenceSession.getTableNamesPresent();
  }

}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.cloud.aws;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.http.IdleConnectionReaper;
import com.amazonaws.internal.StaticCredentialsProvider;
import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cloud.aws.network.Ec2NameResolver;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;

import java.util.Random;

/**
 * Lifecycle component that lazily builds and holds a singleton
 * {@link AmazonEC2Client}, configured from node settings (credentials,
 * protocol, proxy, signer, retry policy, endpoint/region).
 */
public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements AwsEc2Service {

    public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/";

    // lazily created in client(); guarded by the method's synchronization
    private AmazonEC2Client client;

    @Inject
    public AwsEc2ServiceImpl(Settings settings) {
        super(settings);
    }

    /**
     * Returns the shared EC2 client, creating it on first call.
     * Synchronized so concurrent callers never build two clients.
     */
    @Override
    public synchronized AmazonEC2 client() {
        if (client != null) {
            return client;
        }

        this.client = new AmazonEC2Client(buildCredentials(logger, settings), buildConfiguration(logger, settings));
        String endpoint = findEndpoint(logger, settings);
        if (endpoint != null) {
            client.setEndpoint(endpoint);
        }

        return this.client;
    }

    /**
     * Builds the credentials provider: explicit key/secret from settings when
     * both are configured, otherwise the SDK default provider chain
     * (environment variables, system properties, instance profile).
     */
    protected static AWSCredentialsProvider buildCredentials(ESLogger logger, Settings settings) {
        AWSCredentialsProvider credentials;

        String key = CLOUD_EC2.KEY_SETTING.get(settings);
        String secret = CLOUD_EC2.SECRET_SETTING.get(settings);
        if (key.isEmpty() && secret.isEmpty()) {
            logger.debug("Using either environment variables, system properties or instance profile credentials");
            credentials = new DefaultAWSCredentialsProviderChain();
        } else {
            logger.debug("Using basic key/secret credentials");
            credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key, secret));
        }

        return credentials;
    }

    /**
     * Builds the SDK client configuration from settings: protocol, optional
     * proxy, optional legacy request signer, and a custom exponential-backoff
     * retry policy with jitter.
     */
    protected static ClientConfiguration buildConfiguration(ESLogger logger, Settings settings) {
        ClientConfiguration clientConfiguration = new ClientConfiguration();
        // the response metadata cache is only there for diagnostics purposes,
        // but can force objects from every response to the old generation.
        clientConfiguration.setResponseMetadataCacheSize(0);
        clientConfiguration.setProtocol(CLOUD_EC2.PROTOCOL_SETTING.get(settings));

        // proxy is enabled if either the generic or the ec2-scoped proxy host setting exists
        if (PROXY_HOST_SETTING.exists(settings) || CLOUD_EC2.PROXY_HOST_SETTING.exists(settings)) {
            String proxyHost = CLOUD_EC2.PROXY_HOST_SETTING.get(settings);
            Integer proxyPort = CLOUD_EC2.PROXY_PORT_SETTING.get(settings);
            String proxyUsername = CLOUD_EC2.PROXY_USERNAME_SETTING.get(settings);
            String proxyPassword = CLOUD_EC2.PROXY_PASSWORD_SETTING.get(settings);

            clientConfiguration
                .withProxyHost(proxyHost)
                .withProxyPort(proxyPort)
                .withProxyUsername(proxyUsername)
                .withProxyPassword(proxyPassword);
        }

        // #155: we might have 3rd party users using older EC2 API version
        String awsSigner = CLOUD_EC2.SIGNER_SETTING.get(settings);
        if (Strings.hasText(awsSigner)) {
            logger.debug("using AWS API signer [{}]", awsSigner);
            AwsSigner.configureSigner(awsSigner, clientConfiguration);
        }

        // Increase the number of retries in case of 5xx API responses
        final Random rand = Randomness.get();
        RetryPolicy retryPolicy = new RetryPolicy(
            RetryPolicy.RetryCondition.NO_RETRY_CONDITION,
            new RetryPolicy.BackoffStrategy() {
                @Override
                public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest,
                                                 AmazonClientException exception,
                                                 int retriesAttempted) {
                    // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000)
                    logger.warn("EC2 API request failed, retry again. Reason was:", exception);
                    return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble()));
                }
            },
            10,
            false);
        clientConfiguration.setRetryPolicy(retryPolicy);

        return clientConfiguration;
    }

    /**
     * Resolves the EC2 endpoint: an explicit endpoint setting wins; otherwise
     * the region setting is mapped to its well-known endpoint host.
     *
     * @return the endpoint host, or null if neither endpoint nor region is configured
     * @throws IllegalArgumentException for an unknown region name
     */
    protected static String findEndpoint(ESLogger logger, Settings settings) {
        String endpoint = null;
        if (CLOUD_EC2.ENDPOINT_SETTING.exists(settings)) {
            endpoint = CLOUD_EC2.ENDPOINT_SETTING.get(settings);
            logger.debug("using explicit ec2 endpoint [{}]", endpoint);
        } else if (REGION_SETTING.exists(settings) || CLOUD_EC2.REGION_SETTING.exists(settings)) {
            final String region = CLOUD_EC2.REGION_SETTING.get(settings);
            switch (region) {
                case "us-east-1":
                case "us-east":
                    endpoint = "ec2.us-east-1.amazonaws.com";
                    break;
                case "us-west":
                case "us-west-1":
                    endpoint = "ec2.us-west-1.amazonaws.com";
                    break;
                case "us-west-2":
                    endpoint = "ec2.us-west-2.amazonaws.com";
                    break;
                case "ap-southeast":
                case "ap-southeast-1":
                    endpoint = "ec2.ap-southeast-1.amazonaws.com";
                    break;
                case "ap-south-1":
                    endpoint = "ec2.ap-south-1.amazonaws.com";
                    break;
                case "us-gov-west":
                case "us-gov-west-1":
                    endpoint = "ec2.us-gov-west-1.amazonaws.com";
                    break;
                case "ap-southeast-2":
                    endpoint = "ec2.ap-southeast-2.amazonaws.com";
                    break;
                case "ap-northeast":
                case "ap-northeast-1":
                    endpoint = "ec2.ap-northeast-1.amazonaws.com";
                    break;
                case "ap-northeast-2":
                    endpoint = "ec2.ap-northeast-2.amazonaws.com";
                    break;
                case "eu-west":
                case "eu-west-1":
                    endpoint = "ec2.eu-west-1.amazonaws.com";
                    break;
                case "eu-central":
                case "eu-central-1":
                    endpoint = "ec2.eu-central-1.amazonaws.com";
                    break;
                case "sa-east":
                case "sa-east-1":
                    endpoint = "ec2.sa-east-1.amazonaws.com";
                    break;
                case "cn-north":
                case "cn-north-1":
                    endpoint = "ec2.cn-north-1.amazonaws.com.cn";
                    break;
                default:
                    throw new IllegalArgumentException("No automatic endpoint could be derived from region [" + region + "]");
            }
            logger.debug("using ec2 region [{}], with endpoint [{}]", region, endpoint);
        }
        return endpoint;
    }

    @Override
    protected void doStart() throws ElasticsearchException {
    }

    @Override
    protected void doStop() throws ElasticsearchException {
    }

    /** Shuts down the client (if created) and the SDK's idle-connection reaper thread. */
    @Override
    protected void doClose() throws ElasticsearchException {
        if (client != null) {
            client.shutdown();
        }

        // Ensure that IdleConnectionReaper is shutdown
        IdleConnectionReaper.shutdown();
    }
}
package org.opencommercesearch; /* * Licensed to OpenCommerceSearch under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. OpenCommerceSearch licenses this * file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.io.File; import java.io.FileNotFoundException; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import java.util.Locale; import org.apache.commons.io.IOUtils; import org.apache.solr.client.solrj.SolrServer; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer; import org.apache.solr.client.solrj.request.CoreAdminRequest; import org.apache.solr.client.solrj.request.DirectXmlRequest; import org.apache.solr.core.CoreContainer; import org.apache.solr.core.SolrCore; import atg.nucleus.ServiceException; import atg.repository.RepositoryItem; /** * This class provides a SearchServer implementation which can be run as an embedded instance. By default, the configuration * is load from jar file. 
The component can be configured to read the configuration from the local * * @author gsegura * @author rmerizallde */ public class EmbeddedSearchServer extends AbstractSearchServer<EmbeddedSolrServer> { private String solrConfigUrl = "/solr/solr_preview.xml"; private String solrCorePath = "solr"; private String dataDir = null; private boolean enabled = false; private boolean inMemoryIndex = false; private CoreContainer coreContainer; public String getSolrConfigUrl() { return solrConfigUrl; } public void setSolrConfigUrl(String solrConfigUrl) { this.solrConfigUrl = solrConfigUrl; } public String getSolrCorePath() { return solrCorePath; } public void setSolrCorePath(String solrCorePath) { this.solrCorePath = solrCorePath; } public boolean getEnabled() { return enabled; } public void setEnabled(boolean enabled) { this.enabled = enabled; } public boolean getInMemoryIndex() { return inMemoryIndex; } public void setInMemoryIndex(boolean inMemoryIndex) { this.inMemoryIndex = inMemoryIndex; } public String getDataDir() { return dataDir; } public void setDataDir(String dataDir) { this.dataDir = dataDir; } public void connect() throws FileNotFoundException { InputStream in = null; try{ long startTime = System.currentTimeMillis(); String configUrl = getSolrConfigUrl(); System.setProperty("jetty.testMode", "true"); if(getInMemoryIndex()){ System.setProperty("solr.directoryFactory", "solr.RAMDirectoryFactory"); System.setProperty("solr.lockFactory", "single"); configUrl += ".ram"; if (isLoggingInfo()) { logInfo("Initializing in-memory embedded search server"); } } else { if (getDataDir() != null) { if (!checkDataDirectory(getDataDir())) { throw new FileNotFoundException("Directory not found " + getDataDir()); } System.setProperty("data.dir", getDataDir()); } if (isLoggingInfo()) { logInfo("Initializing embedded search server, data directory is " + getDataDir()); } } in = getClass().getResourceAsStream(configUrl); if (in != null) { File tmpConfigFile = 
File.createTempFile("solr-", ".xml"); FileWriter out = new FileWriter(tmpConfigFile); IOUtils.copy(in, out); if (isLoggingInfo()) { logInfo("Using embedded sarch server with config file " + tmpConfigFile.getPath()); } out.close(); coreContainer = CoreContainer.createAndLoad(getSolrCorePath(), tmpConfigFile); tmpConfigFile.delete(); // @TODO fix this support configurable supported locales setCatalogSolrServer(createEmbeddedSolrServer(coreContainer, getCatalogCollection(), Locale.ENGLISH), Locale.ENGLISH); setRulesSolrServer(createEmbeddedSolrServer(coreContainer, getRulesCollection(), Locale.ENGLISH), Locale.ENGLISH); setCatalogSolrServer(createEmbeddedSolrServer(coreContainer, getCatalogCollection(), Locale.FRENCH), Locale.FRENCH); setRulesSolrServer(createEmbeddedSolrServer(coreContainer, getRulesCollection(), Locale.FRENCH), Locale.FRENCH); } else { throw new FileNotFoundException("Resource not found " + getSolrConfigUrl()); } if (isLoggingInfo()) { logInfo("Embedded search server initialized in " + (System.currentTimeMillis() - startTime) + "ms"); } } catch (IOException ex) { if(isLoggingError()){ logError(ex); } } finally { try { if (in != null) { in.close(); } } catch (IOException ex) { if(isLoggingError()){ logError(ex); } } } } public void close() throws IOException { if (coreContainer != null) { coreContainer.shutdown(); } } /** * Creates a copy of this search server by cloning the cores * * @param name is the name for the copy used to identify the new server's core, instance directory and other configurations * * @return a copy of this search server * * @throws SolrServerException if an error occurs while communicating with Solr * @throws IOException if an error occurs while communicating with Solr */ public EmbeddedSearchServer createCopy(String name) throws SolrServerException, IOException { if (isLoggingInfo()) { logInfo("Creating search server copy for " + name); } EmbeddedSearchServer copy = new EmbeddedSearchServer(); // @TODO support for 
configurable locales
        // Clone the per-locale catalog and rule cores into "<name>_"-prefixed copies.
        for (Locale locale : new Locale[]{Locale.ENGLISH, Locale.FRENCH}) {
            // Source core/collection names for this locale.
            String catalogCoreName = getCatalogCollection(locale);
            String rulesCoreName = getRulesCollection(locale);
            // Names for the copies; collection names use the non-localized base name,
            // core names use the localized name.
            String copyCatalogCollectionName = name + "_" + getCatalogCollection();
            String copyRuleCollectionName = name + "_" + getRulesCollection();
            String copyCatalogCoreName = name + "_" + catalogCoreName;
            String copyRuleCoreName = name + "_" + rulesCoreName;

            // NOTE(review): CoreContainer.getCore() increments the core's reference
            // count in Solr; these cores are never close()d here — verify whether a
            // reference is leaked per call.
            SolrCore catalogCore = coreContainer.getCore(catalogCoreName);
            SolrCore rulesCore = coreContainer.getCore(rulesCoreName);

            // Only clone cores that actually exist in the container.
            if (catalogCore != null) {
                copy.setCatalogCollection(copyCatalogCollectionName);
                cloneCore(catalogCore, copyCatalogCollectionName, copyCatalogCoreName, "product_catalog", locale);
                copy.setCatalogSolrServer(createEmbeddedSolrServer(coreContainer, copyCatalogCollectionName, locale), locale);
                copy.getSolrServer(copyCatalogCollectionName, locale).commit();
            }

            if (rulesCore != null) {
                copy.setRulesCollection(copyRuleCollectionName);
                cloneCore(rulesCore, copyRuleCollectionName, copyRuleCoreName, "rules", locale);
                copy.setRulesSolrServer(createEmbeddedSolrServer(coreContainer, copyRuleCollectionName, locale), locale);
                copy.getSolrServer(copyRuleCollectionName, locale).commit();
            }
        }

        // Mirror the remaining configuration from this server onto the copy.
        copy.setSearchRepository(getSearchRepository());
        copy.setInMemoryIndex(getInMemoryIndex());
        copy.setEnabled(getEnabled());
        copy.setDataDir(getDataDir());
        copy.setSolrConfigUrl(getSolrConfigUrl());
        copy.setSolrCorePath(getSolrCorePath());
        copy.setLoggingInfo(this.isLoggingInfo());
        copy.setLoggingDebug(this.isLoggingDebug());
        copy.setLoggingError(this.isLoggingError());
        copy.setLoggingWarning(this.isLoggingWarning());
        copy.coreContainer = coreContainer;

        if (isLoggingInfo()) {
            logInfo("Successfully create search server copy for " + name);
        }
        return copy;
    }

    /**
     * Helper method to clone a core: creates a new core via the core admin API and
     * then merges the source core's index into it.
     *
     * @param core the source core to clone
     * @param collectionName the collection used to address the admin requests
     * @param coreName the name of the new (destination) core
     * @param instanceDir the instance directory for the new core
     * @param locale the locale of the core being cloned
     * @throws SolrServerException if a Solr request fails
     * @throws IOException if an I/O error occurs while talking to Solr
     */
    private void cloneCore(SolrCore core, String collectionName, String coreName, String instanceDir, Locale locale) throws SolrServerException,
            IOException {
        if (isLoggingInfo()) {
            logInfo("Cloning core '" + core.getName() + "' into '" + coreName + "' using instance directory " + instanceDir);
        }

        // Create the destination core, reusing the source core's schema.
        CoreAdminRequest.Create create = new CoreAdminRequest.Create();
        create.setCoreName(coreName);
        create.setInstanceDir(instanceDir);
        create.setDataDir(coreName + "/data");
        create.setSchemaName(core.getSchemaResource());
        getSolrServer(collectionName, locale).request(create);

        // Merge the source core's index into the freshly created core.
        CoreAdminRequest.MergeIndexes mergeIndexes = new CoreAdminRequest.MergeIndexes();
        mergeIndexes.setCoreName(coreName);
        mergeIndexes.setSrcCores(Arrays.asList(core.getName()));
        SolrServer server = getSolrServer(collectionName, locale);
        server.request(mergeIndexes);
    }

    /**
     * Updates the collection with the given name with the XML contents.
     *
     * @param collectionName the collection to update
     * @param xmlBody the update XML as a String
     * @param locale the locale of the collection to update
     * @throws SolrServerException if an error occurs while updating the collection
     * @throws IOException if an I/O error occurs while updating the collection
     */
    void updateCollection(String collectionName, String xmlBody, Locale locale) throws SolrServerException, IOException {
        if (isLoggingInfo()) {
            logInfo("Updating collection " + collectionName);
        }
        if (isLoggingDebug()) {
            logDebug("Updating collection " + collectionName + " with xml; " + xmlBody);
        }

        DirectXmlRequest request = new DirectXmlRequest("/update", xmlBody);
        SolrServer server = getSolrServer(collectionName, locale);
        server.request(request);
        server.commit();
    }

    /**
     * Shutdown the cores for this server, however the coreContainer is left running. This method is intended for the
     * integration testing framework only. Don't use.
     */
    public void shutdownCores() throws SolrServerException, IOException {
        if (isLoggingInfo()) {
            logInfo("Shutting down core for collection " + getCatalogCollection());
            logInfo("Shutting down core for collection " + getRulesCollection());
        }

        // When the index lives on disk (not in memory) delete it on unload.
        boolean deleteIndex = !getInMemoryIndex();

        // @TODO add support to shutdown all localized cores (only EN/FR handled here)
        CoreAdminRequest.unloadCore(getCatalogCollection(Locale.ENGLISH), deleteIndex, getCatalogSolrServer(Locale.ENGLISH));
        CoreAdminRequest.unloadCore(getRulesCollection(Locale.ENGLISH), deleteIndex, getRulesSolrServer(Locale.ENGLISH));
        CoreAdminRequest.unloadCore(getCatalogCollection(Locale.FRENCH), deleteIndex, getCatalogSolrServer(Locale.FRENCH));
        CoreAdminRequest.unloadCore(getRulesCollection(Locale.FRENCH), deleteIndex, getRulesSolrServer(Locale.FRENCH));
    }

    /**
     * Creates an embedded Solr server bound to the localized collection name
     * ("&lt;collectionName&gt;_&lt;language&gt;").
     */
    private EmbeddedSolrServer createEmbeddedSolrServer(final CoreContainer container, final String collectionName, final Locale locale) {
        String localizedCollectionName = collectionName + "_" + locale.getLanguage();
        return new EmbeddedSolrServer(container, localizedCollectionName);
    }

    /** Starts the service and connects to the embedded Solr instance. */
    @Override
    public void doStartService() throws ServiceException {
        super.doStartService();
        try {
            connect();
        } catch (FileNotFoundException ex) {
            throw new ServiceException(ex);
        }
    }

    /** Stops the service, closing the underlying Solr resources. */
    @Override
    public void doStopService() throws ServiceException {
        try {
            close();
        } catch (IOException ex) {
            throw new ServiceException(ex);
        }
    }

    /**
     * Helper method to check if a directory structure exists, creating it if missing.
     *
     * @return true if the directory already existed; otherwise the result of mkdirs()
     */
    private boolean checkDataDirectory(String dataDir) {
        File file = new File(dataDir);
        boolean exists = true;

        if (!file.exists() ) {
            exists = file.mkdirs();
            // NOTE(review): this logs "Created data directory" even when mkdirs()
            // returned false — confirm whether failure should be logged instead.
            if (isLoggingInfo()) {
                logInfo("Created data directory " + file.getPath());
            }
        }
        return exists;
    }

    // Synonym export requires the SolrCloud-based implementation; the embedded
    // server intentionally rejects it.
    @Override
    protected void exportSynonymList(RepositoryItem synonymList, Locale locale) throws SearchServerException {
        throw new UnsupportedOperationException("Exporting synonyms is only supported when using SolrCloud");
    }

    /**
     * Reloads the collection with the given name.
     *
     * @param collectionName the collection to reload
     * @param locale of the collection (unused here; the container reloads by name)
     */
    @Override
    public void reloadCollection(String collectionName,
                                 Locale locale) throws SearchServerException {
        if (isLoggingInfo()) {
            logInfo("Reloading collection " + collectionName);
        }
        coreContainer.reload(collectionName);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.testframework.context; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.regex.Pattern; import org.apache.asterix.testframework.template.TemplateHelper; import org.apache.asterix.testframework.xml.CategoryEnum; import org.apache.asterix.testframework.xml.TestCase; import org.apache.asterix.testframework.xml.TestCase.CompilationUnit; import org.apache.asterix.testframework.xml.TestGroup; import org.apache.asterix.testframework.xml.TestSuite; import org.apache.asterix.testframework.xml.TestSuiteParser; public class TestCaseContext { /** * For specifying the desired output formatting of results. 
*/ public enum OutputFormat { NONE("", ""), ADM("adm", "application/x-adm"), LOSSLESS_JSON("json", "application/json; lossless=true"), CLEAN_JSON("json", "application/json"), CSV("csv", "text/csv"), CSV_HEADER("csv-header", "text/csv; header=present"), AST("ast", "application/x-ast"), PLAN("plan", "application/x-plan"); private final String extension; private final String mimetype; OutputFormat(String ext, String mime) { this.extension = ext; this.mimetype = mime; } public String extension() { return extension; } public String mimeType() { return mimetype; } // public static OutputFormat forCompilationUnit(CompilationUnit cUnit) { switch (cUnit.getOutputDir().getCompare()) { case TEXT: return OutputFormat.ADM; case LOSSLESS_JSON: return OutputFormat.LOSSLESS_JSON; case CLEAN_JSON: return OutputFormat.CLEAN_JSON; case CSV: return OutputFormat.CSV; case CSV_HEADER: return OutputFormat.CSV_HEADER; case INSPECT: case IGNORE: return OutputFormat.NONE; case AST: return OutputFormat.AST; case PLAN: return OutputFormat.PLAN; default: assert false : "Unknown ComparisonEnum!"; return OutputFormat.NONE; } } }; public static final String DEFAULT_TESTSUITE_XML_NAME = "testsuite.xml"; public static final String ONLY_TESTSUITE_XML_NAME = "only.xml"; public static final String DEFAULT_REPEATED_TESTSUITE_XML_NAME = "repeatedtestsuite.xml"; private File tsRoot; private TestSuite testSuite; private TestGroup[] testGroups; private TestCase testCase; public TestCaseContext(File tsRoot, TestSuite testSuite, TestGroup[] testGroups, TestCase testCase) { this.tsRoot = tsRoot; this.testSuite = testSuite; this.testGroups = testGroups; this.testCase = testCase; } public File getTsRoot() { return tsRoot; } public TestSuite getTestSuite() { return testSuite; } public TestGroup[] getTestGroups() { return testGroups; } public TestCase getTestCase() { return testCase; } public int getRepeat() { return testCase.getRepeat().intValue(); } public List<TestFileContext> getFilesInDir(String basePath, 
String dirName, boolean withType) { List<TestFileContext> testFileCtxs = new ArrayList<TestFileContext>(); File path = tsRoot; path = new File(path, basePath); path = new File(path, testCase.getFilePath()); path = new File(path, dirName); if (path.isDirectory()) { String fileNames[] = path.list(); for (String fName : fileNames) { if (fName.startsWith(".")) { continue; } File testFile = new File(path, fName); if (fName.endsWith(".template")) { try { testFile = TemplateHelper.INSTANCE.resolveTemplateFile(testFile); } catch (IOException e) { throw new IllegalArgumentException(e); } fName = testFile.getName(); } TestFileContext tfsc = new TestFileContext(testFile); String[] nameSplits = fName.split("\\."); if (nameSplits.length < 3) { throw new IllegalArgumentException("Test file '" + dirName + File.separatorChar + fName + "' does not have the proper test file name format."); } if (withType) { tfsc.setSeqNum(nameSplits[nameSplits.length - 3]); tfsc.setType(nameSplits[nameSplits.length - 2]); } else { tfsc.setSeqNum(nameSplits[nameSplits.length - 2]); } testFileCtxs.add(tfsc); } } Collections.sort(testFileCtxs); return testFileCtxs; } public List<TestFileContext> getTestFiles(CompilationUnit cUnit) { return getFilesInDir(testSuite.getQueryOffsetPath(), cUnit.getName(), true); } public List<TestFileContext> getExpectedResultFiles(CompilationUnit cUnit) { return getFilesInDir(testSuite.getResultOffsetPath(), cUnit.getOutputDir().getValue(), false); } public File getActualResultFile(CompilationUnit cUnit, File expectedFile, File actualResultsBase) { File path = actualResultsBase; path = new File(path, testSuite.getResultOffsetPath()); path = new File(path, testCase.getFilePath()); return new File(path, cUnit.getOutputDir().getValue() + File.separator + expectedFile.getName()); } @Override public String toString() { StringBuilder sb = new StringBuilder(testCase.getFilePath()); sb.append(':'); for (CompilationUnit cu : testCase.getCompilationUnit()) { sb.append(' '); 
sb.append(cu.getName()); } return sb.toString(); } public static class Builder { private final boolean m_doSlow; private final Pattern m_re; public Builder() { m_doSlow = System.getProperty("runSlowAQLTests", "false").equals("true"); String re = System.getProperty("testre"); if (re == null) { m_re = null; } else { m_re = Pattern.compile(re); } } public List<TestCaseContext> build(File tsRoot) throws Exception { return build(tsRoot, DEFAULT_TESTSUITE_XML_NAME); } public List<TestCaseContext> build(File tsRoot, String tsXMLFilePath) throws Exception { File tsFile = new File(tsRoot, tsXMLFilePath); TestSuiteParser tsp = new TestSuiteParser(); TestSuite ts = tsp.parse(tsFile); List<TestCaseContext> tccs = new ArrayList<TestCaseContext>(); List<TestGroup> tgPath = new ArrayList<TestGroup>(); addContexts(tsRoot, ts, tgPath, ts.getTestGroup(), tccs); return tccs; } private void addContexts(File tsRoot, TestSuite ts, List<TestGroup> tgPath, List<TestGroup> testGroups, List<TestCaseContext> tccs) { for (TestGroup tg : testGroups) { tgPath.add(tg); addContexts(tsRoot, ts, tgPath, tccs); tgPath.remove(tgPath.size() - 1); } } private void addContexts(File tsRoot, TestSuite ts, List<TestGroup> tgPath, List<TestCaseContext> tccs) { TestGroup tg = tgPath.get(tgPath.size() - 1); for (TestCase tc : tg.getTestCase()) { if (m_doSlow || tc.getCategory() != CategoryEnum.SLOW) { boolean matches = false; if (m_re != null) { // Check all compilation units for matching // name. If ANY match, add the test. for (TestCase.CompilationUnit cu : tc.getCompilationUnit()) { if (m_re.matcher(cu.getName()).find()) { matches = true; break; } } } else { // No regex == everything matches matches = true; } if (matches) { tccs.add(new TestCaseContext(tsRoot, ts, tgPath.toArray(new TestGroup[tgPath.size()]), tc)); } } } addContexts(tsRoot, ts, tgPath, tg.getTestGroup(), tccs); } } }
package com.amplifino.nestor.dot;

import java.util.HashSet;
import java.util.Set;

/**
 * Utility class to help building a dot (Graphviz digraph) source.
 *
 * <p>Not thread-safe; intended for single-threaded, fluent use.
 */
public final class DigraphBuilder {

    private final StringBuilder builder;
    // Names of nodes already emitted; used both to avoid duplicate node
    // statements and to validate edge endpoints.
    private final Set<String> nodes = new HashSet<>();

    private DigraphBuilder(String name) {
        this.builder = new StringBuilder("digraph ");
        quote(name);
        builder.append(" {\n");
    }

    /**
     * create a new Builder with the given name
     * @param name the digraph name
     * @return a fresh builder with the digraph header already emitted
     */
    public static DigraphBuilder name(String name) {
        return new DigraphBuilder(name);
    }

    /**
     * append the argument to the source
     * @param string text to append verbatim
     * @return this
     */
    public DigraphBuilder append(String string) {
        builder.append(string);
        return this;
    }

    /**
     * append a line to the source
     * @param string text to append, followed by a newline
     * @return this
     */
    public DigraphBuilder println(String string) {
        builder.append(string);
        builder.append("\n");
        return this;
    }

    /**
     * append the argument to the source as a quoted string,
     * escaping embedded double quotes with a backslash
     * @param string text to quote
     * @return this
     */
    public DigraphBuilder quote(String string) {
        builder.append('"');
        for (int i = 0; i < string.length(); i++) {
            if (string.charAt(i) == '"') {
                builder.append('\\');
                builder.append('"');
            } else {
                builder.append(string.charAt(i));
            }
        }
        builder.append('"');
        return this;
    }

    /**
     * adds an arrow ( -> ) to the source
     * @return this
     */
    public DigraphBuilder addDepends() {
        builder.append(" -> ");
        return this;
    }

    /**
     * adds a ; character to the source
     * @return this
     */
    public DigraphBuilder semiColumn() {
        builder.append(";");
        return this;
    }

    /**
     * adds a newline character (\n) to the source
     * @return this
     */
    public DigraphBuilder newLine() {
        builder.append("\n");
        return this;
    }

    /**
     * terminates the source, by closing the digraph section
     * @return this
     */
    public DigraphBuilder endGraph() {
        builder.append("\n}");
        return this;
    }

    /**
     * adds n tabs to the source
     * @param n number of tabs
     * @return this
     */
    public DigraphBuilder tab(int n) {
        for (int i = 0; i < n; i++) {
            builder.append("\t");
        }
        return this;
    }

    /**
     * adds a tab to the source
     * @return this
     */
    public DigraphBuilder tab() {
        return tab(1);
    }

    /**
     * adds a { character to the source
     * @return this
     */
    public DigraphBuilder openCurly() {
        builder.append("{");
        return this;
    }

    /**
     * adds a } character to the source
     * @return this
     */
    public DigraphBuilder closeCurly() {
        builder.append("}");
        return this;
    }

    /**
     * returns the source
     * @return the accumulated dot source
     */
    public String build() {
        return builder.toString();
    }

    /**
     * returns an edge builder whose edge starts at the named node
     * @param name the source node; must have been added via {@link NodeBuilder#add()}
     * @return an edge builder
     * @throws IllegalArgumentException if no node with that name was added
     */
    public EdgeBuilder edge(String name) {
        if (!nodes.contains(name)) {
            throw new IllegalArgumentException(name);
        }
        return new EdgeBuilder(name);
    }

    /**
     * returns a node builder for a node with the given name
     * @param name the node name
     * @return a node builder
     */
    public NodeBuilder node(String name) {
        return new NodeBuilder(name);
    }

    /**
     * Helps in building a node statement
     */
    public class NodeBuilder {
        private final String name;
        private String label;
        private Shape shape;
        private String url;

        private NodeBuilder(String name) {
            this.name = name;
        }

        /**
         * sets the node's label
         * @param label display label
         * @return this
         */
        public NodeBuilder label(String label) {
            this.label = label;
            return this;
        }

        /**
         * sets the node's shape
         * @param shape one of the supported {@link Shape}s
         * @return this
         */
        public NodeBuilder shape(Shape shape) {
            this.shape = shape;
            return this;
        }

        /**
         * sets the node's hyperlink url
         * @param url hyperlink target, emitted as a URL attribute
         * @return this
         */
        public NodeBuilder url(String url) {
            this.url = url;
            return this;
        }

        /**
         * adds the node statement to the source; a node name is only
         * emitted once — subsequent add() calls for the same name are no-ops
         */
        public void add() {
            if (nodes.contains(name)) {
                return;
            } else {
                nodes.add(name);
            }
            quote(name);
            String separator = "";
            if (hasAttributes()) {
                append("[");
                if (shape != null) {
                    append("shape=");
                    append(shape.name().toLowerCase());
                    separator = ",";
                }
                if (label != null) {
                    append(separator);
                    append("label=");
                    quote(label);
                    separator = ",";
                }
                if (url != null) {
                    append(separator);
                    append("URL=");
                    quote(url);
                    separator = ",";
                }
                append("]");
            }
            newLine();
        }

        private boolean hasAttributes() {
            // Fix: url was missing here, so a node configured with only a URL
            // was emitted without its [URL=...] attribute block.
            return label != null || shape != null || url != null;
        }
    }

    /**
     * enumeration of supported shapes
     */
    public enum Shape {
        BOX,
        POLYGON,
        ELLIPSE,
        OVAL,
        CIRCLE,
        TRIANGLE;
    }

    /**
     * Helps in building an edge statement ("from" -> "to")
     */
    public class EdgeBuilder {
        private final String from;

        private EdgeBuilder(String from) {
            this.from = from;
        }

        /**
         * emits the edge to the named destination node
         * @param name the destination node; must have been added already
         * @return the enclosing digraph builder
         * @throws IllegalArgumentException if no node with that name was added
         */
        public DigraphBuilder to(String name) {
            if (!nodes.contains(name)) {
                // Fix: include the offending name, consistent with edge(String).
                throw new IllegalArgumentException(name);
            }
            quote(from);
            append(" -> ");
            quote(name);
            newLine();
            return DigraphBuilder.this;
        }
    }
}